ACodec.cpp revision fc33e4017e2adaeacb0c4deffc762c612d70f256
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "omx/OMXUtils.h" 57 58namespace android { 59 60enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62}; 63 64// OMX errors are directly mapped into 
// status_t range if
// there is no corresponding MediaError status code.
// Use the statusFromOMXError(int32_t omxError) function.
//
// Currently this is a direct map.
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors from 0x90000000 - 0x9000FFFF
// Extension OMX errors from 0x8F000000 - 0x90000000
// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
        default:
            return isOMXError(omxError) ? omxError : 0; // no translation required
    }
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Refcounted wrapper around a std::list of AMessages, so an entire batch of
// repackaged OMX messages can be attached to one AMessage as a single object.
struct MessageList : public RefBase {
    MessageList() {
    }
    virtual ~MessageList() {
    }
    std::list<sp<AMessage> > &getList() { return mList; }
private:
    std::list<sp<AMessage> > mList;

    DISALLOW_EVIL_CONSTRUCTORS(MessageList);
};

// Returns the process-wide shared plain-copy DataConverter; constructed
// exactly once (thread-safely) via pthread_once.
static sp<DataConverter> getCopyConverter() {
    static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited
    static sp<DataConverter> sCopyConverter; // zero-inited
    pthread_once(&once, [](){ sCopyConverter = new DataConverter(); });
    return sCopyConverter;
}

// Binder-side observer registered with the OMX node: repackages each incoming
// batch of omx_messages into AMessages and posts them (as one MessageList)
// on the notification message installed via setNotificationMessage().
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessages(const std::list<omx_message> &messages) {
        if (messages.empty()) {
            return;
        }

        sp<AMessage> notify = mNotify->dup();
        bool first = true;
        sp<MessageList> msgList = new MessageList();
        for (std::list<omx_message>::const_iterator it = messages.cbegin();
              it != messages.cend(); ++it) {
            const omx_message &omx_msg = *it;
            if (first) {
                // the node id is the same for the whole batch; record it once
                notify->setInt32("node", omx_msg.node);
                first = false;
            }

            sp<AMessage> msg = new AMessage;
            msg->setInt32("type", omx_msg.type);
            switch (omx_msg.type) {
                case omx_message::EVENT:
                {
                    msg->setInt32("event", omx_msg.u.event_data.event);
                    msg->setInt32("data1", omx_msg.u.event_data.data1);
                    msg->setInt32("data2", omx_msg.u.event_data.data2);
                    break;
                }

                case omx_message::EMPTY_BUFFER_DONE:
                {
                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                    msg->setInt32("fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FILL_BUFFER_DONE:
                {
                    msg->setInt32(
                            "buffer", omx_msg.u.extended_buffer_data.buffer);
                    msg->setInt32(
                            "range_offset",
                            omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for all ACodec state-machine states. Routes OMX callback
// messages and buffer drained/filled messages to the virtual handlers that
// the concrete states override.
struct ACodec::BaseState : public AState {
    explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    // What to do with port buffers arriving while in this state.
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts the given notification message when the watched binder (the remote
// OMX service) dies.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    explicit DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

// State before a component has been allocated.
struct ACodec::UninitializedState : public ACodec::BaseState {
    explicit UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

// State with a component allocated (OMX "Loaded") but not yet transitioning
// to idle; handles configuration and input-surface creation.
struct ACodec::LoadedState : public ACodec::BaseState {
    explicit LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const
            sp<AMessage> &msg);
    void onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: waiting for the component to reach OMX idle after
// buffers have been allocated on both ports.
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    explicit LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: waiting for the component to reach OMX executing.
struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    explicit IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Steady state: component is executing and buffers are circulating.
struct ACodec::ExecutingState : public ACodec::BaseState {
    explicit ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// State while an output port reconfiguration (port settings change) is in
// progress.
struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    explicit OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: shutting down, waiting for the component to reach OMX
// idle and for all buffers to return to us.
struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    explicit ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: waiting for the component to return to OMX loaded.
struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    explicit IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// State while flushing; tracks per-port flush completion in mFlushComplete.
struct ACodec::FlushingState : public ACodec::BaseState {
    explicit FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Records fenceFd as this buffer's write fence; warns if an existing
// (possibly unwaited-on) fence is being overwritten.
void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = false;
}

// Records fenceFd as this buffer's read fence; warns if an existing
// (possibly unwaited-on) fence is being overwritten.
void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
                mIsReadFence ?
"read" : "write", mFenceFd, fenceFd, dbg); 477 } 478 mFenceFd = fenceFd; 479 mIsReadFence = true; 480} 481 482void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 483 if (mFenceFd >= 0 && mIsReadFence) { 484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 485 } 486} 487 488void ACodec::BufferInfo::checkReadFence(const char *dbg) { 489 if (mFenceFd >= 0 && !mIsReadFence) { 490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 491 } 492} 493 494//////////////////////////////////////////////////////////////////////////////// 495 496ACodec::ACodec() 497 : mSampleRate(0), 498 mQuirks(0), 499 mNode(0), 500 mUsingNativeWindow(false), 501 mNativeWindowUsageBits(0), 502 mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN), 503 mIsVideo(false), 504 mIsEncoder(false), 505 mFatalError(false), 506 mShutdownInProgress(false), 507 mExplicitShutdown(false), 508 mIsLegacyVP9Decoder(false), 509 mEncoderDelay(0), 510 mEncoderPadding(0), 511 mRotationDegrees(0), 512 mChannelMaskPresent(false), 513 mChannelMask(0), 514 mDequeueCounter(0), 515 mInputMetadataType(kMetadataBufferTypeInvalid), 516 mOutputMetadataType(kMetadataBufferTypeInvalid), 517 mLegacyAdaptiveExperiment(false), 518 mMetadataBuffersToSubmit(0), 519 mNumUndequeuedBuffers(0), 520 mRepeatFrameDelayUs(-1ll), 521 mMaxPtsGapUs(-1ll), 522 mMaxFps(-1), 523 mTimePerFrameUs(-1ll), 524 mTimePerCaptureUs(-1ll), 525 mCreateInputBuffersSuspended(false), 526 mTunneled(false), 527 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0), 528 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) { 529 mUninitializedState = new UninitializedState(this); 530 mLoadedState = new LoadedState(this); 531 mLoadedToIdleState = new LoadedToIdleState(this); 532 mIdleToExecutingState = new IdleToExecutingState(this); 533 mExecutingState = new ExecutingState(this); 534 535 mOutputPortSettingsChangedState = 536 new OutputPortSettingsChangedState(this); 537 538 mExecutingToIdleState = new ExecutingToIdleState(this); 539 
mIdleToLoadedState = new IdleToLoadedState(this); 540 mFlushingState = new FlushingState(this); 541 542 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 543 mInputEOSResult = OK; 544 545 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 546 547 changeState(mUninitializedState); 548} 549 550ACodec::~ACodec() { 551} 552 553void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 554 mNotify = msg; 555} 556 557void ACodec::initiateSetup(const sp<AMessage> &msg) { 558 msg->setWhat(kWhatSetup); 559 msg->setTarget(this); 560 msg->post(); 561} 562 563void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 564 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 565 msg->setMessage("params", params); 566 msg->post(); 567} 568 569void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 570 msg->setWhat(kWhatAllocateComponent); 571 msg->setTarget(this); 572 msg->post(); 573} 574 575void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 576 msg->setWhat(kWhatConfigureComponent); 577 msg->setTarget(this); 578 msg->post(); 579} 580 581status_t ACodec::setSurface(const sp<Surface> &surface) { 582 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 583 msg->setObject("surface", surface); 584 585 sp<AMessage> response; 586 status_t err = msg->postAndAwaitResponse(&response); 587 588 if (err == OK) { 589 (void)response->findInt32("err", &err); 590 } 591 return err; 592} 593 594void ACodec::initiateCreateInputSurface() { 595 (new AMessage(kWhatCreateInputSurface, this))->post(); 596} 597 598void ACodec::initiateSetInputSurface( 599 const sp<PersistentSurface> &surface) { 600 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 601 msg->setObject("input-surface", surface); 602 msg->post(); 603} 604 605void ACodec::signalEndOfInputStream() { 606 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 607} 608 609void ACodec::initiateStart() { 610 (new AMessage(kWhatStart, this))->post(); 611} 612 613void 
ACodec::signalFlush() { 614 ALOGV("[%s] signalFlush", mComponentName.c_str()); 615 (new AMessage(kWhatFlush, this))->post(); 616} 617 618void ACodec::signalResume() { 619 (new AMessage(kWhatResume, this))->post(); 620} 621 622void ACodec::initiateShutdown(bool keepComponentAllocated) { 623 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 624 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 625 msg->post(); 626 if (!keepComponentAllocated) { 627 // ensure shutdown completes in 3 seconds 628 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 629 } 630} 631 632void ACodec::signalRequestIDRFrame() { 633 (new AMessage(kWhatRequestIDRFrame, this))->post(); 634} 635 636// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 637// Some codecs may return input buffers before having them processed. 638// This causes a halt if we already signaled an EOS on the input 639// port. For now keep submitting an output buffer if there was an 640// EOS on the input port, but not yet on the output port. 
void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetadataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
    }
}

// Switches the output surface, possibly while the codec is already running.
// The new surface is validated against the existing allocation (usage bits,
// min-undequeued count, buffer count), registered buffers are attached to it,
// undequeued buffers are canceled into it, and only then does it replace
// mNativeWindow.
status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
    // allow keeping unset surface
    if (surface == NULL) {
        if (mNativeWindow != NULL) {
            ALOGW("cannot unset a surface");
            return INVALID_OPERATION;
        }
        return OK;
    }

    // cannot switch from bytebuffers to surface
    if (mNativeWindow == NULL) {
        ALOGW("component was not configured with a surface");
        return INVALID_OPERATION;
    }

    ANativeWindow *nativeWindow = surface.get();
    // if we have not yet started the codec, we can simply set the native window
    if (mBuffers[kPortIndexInput].size() == 0) {
        mNativeWindow = surface;
        return OK;
    }

    // we do not support changing a tunneled surface after start
    if (mTunneled) {
        ALOGW("cannot change tunneled surface");
        return INVALID_OPERATION;
    }

    int usageBits = 0;
    // no need to reconnect as we will not dequeue all buffers
    status_t err = setupNativeWindowSizeFormatAndUsage(
            nativeWindow, &usageBits,
            !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */);
    if (err != OK) {
        return err;
    }

    int ignoredFlags = kVideoGrallocUsage;
    // New output surface is not allowed to add new usage flag except ignored ones.
    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
        return BAD_VALUE;
    }

    // get min undequeued count. We cannot switch to a surface that has a higher
    // undequeued count than we allocated.
    int minUndequeuedBuffers = 0;
    err = nativeWindow->query(
            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &minUndequeuedBuffers);
    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }
    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
                minUndequeuedBuffers, mNumUndequeuedBuffers);
        return BAD_VALUE;
    }

    // we cannot change the number of output buffers while OMX is running
    // set up surface to the same count
    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
    ALOGV("setting up surface for %zu buffers", buffers.size());

    err = native_window_set_buffer_count(nativeWindow, buffers.size());
    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    // need to enable allocation when attaching
    surface->getIGraphicBufferProducer()->allowAllocation(true);

    // for meta data mode, we move dequeud buffers to the new surface.
    // for non-meta mode, we must move all registered buffers
    for (size_t i = 0; i < buffers.size(); ++i) {
        const BufferInfo &info = buffers[i];
        // skip undequeued buffers for meta data mode
        if (storingMetadataInDecodedBuffers()
                && !mLegacyAdaptiveExperiment
                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("skipping buffer");
            continue;
        }
        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());

        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
        if (err != OK) {
            ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
                    info.mGraphicBuffer->getNativeBuffer(),
                    strerror(-err), -err);
            return err;
        }
    }

    // cancel undequeued buffers to new surface
    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            BufferInfo &info = buffers.editItemAt(i);
            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                err = nativeWindow->cancelBuffer(
                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
                // fence fd is now owned by the native window
                info.mFenceFd = -1;
                if (err != OK) {
                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
                            info.mGraphicBuffer->getNativeBuffer(),
                            strerror(-err), -err);
                    return err;
                }
            }
        }
        // disallow further allocation
        (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // push blank buffers to previous window if requested
    if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
        pushBlankBuffersToNativeWindow(mNativeWindow.get());
    }

    mNativeWindow = nativeWindow;
    mNativeWindowUsageBits = usageBits;
    return OK;
}

// Allocates all buffers for one port: through the native window for an
// output port with a surface, otherwise via a MemoryDealer whose memory is
// shared with (or backing) the OMX component. On success, notifies the
// client with a PortDescription of the allocated buffers.
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    CHECK(mDealer[portIndex] == NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (storingMetadataInDecodedBuffers()) {
            err = allocateOutputMetadataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            MetadataBufferType type =
                portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
            size_t bufSize = def.nBufferSize;
            if (type == kMetadataBufferTypeANWBuffer) {
                bufSize = sizeof(VideoNativeMetadata);
            } else if (type == kMetadataBufferTypeNativeHandleSource) {
                bufSize = sizeof(VideoNativeHandleMetadata);
            }

            // If using gralloc or native source input metadata buffers, allocate largest
            // metadata size as we prefer to generate native source metadata, but component
            // may require gralloc source. For camera source, allocate at least enough
            // size for native metadata buffers.
            size_t allottedSize = bufSize;
            if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) {
                bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
                bufSize = max(bufSize, sizeof(VideoNativeMetadata));
            }

            size_t conversionBufferSize = 0;

            sp<DataConverter> converter = mConverter[portIndex];
            if (converter != NULL) {
                // here we assume sane conversions of max 4:1, so result fits in int32
                if (portIndex == kPortIndexInput) {
                    conversionBufferSize = converter->sourceSize(bufSize);
                } else {
                    conversionBufferSize = converter->targetSize(bufSize);
                }
            }

            size_t alignment = MemoryDealer::getAllocationAlignment();

            ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
                    portIndex == kPortIndexInput ? "input" : "output");

            // verify buffer sizes to avoid overflow in align()
            if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            // don't modify bufSize as OMX may not expect it to increase after negotiation
            size_t alignedSize = align(bufSize, alignment);
            size_t alignedConvSize = align(conversionBufferSize, alignment);
            if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
                if (mem == NULL || mem->pointer() == NULL) {
                    return NO_MEMORY;
                }

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;
                info.mFenceFd = -1;
                info.mRenderInfo = NULL;
                info.mNativeHandle = NULL;

                uint32_t requiresAllocateBufferBit =
                    (portIndex == kPortIndexInput)
                        ? kRequiresAllocateBufferOnInputPorts
                        : kRequiresAllocateBufferOnOutputPorts;

                if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
                    // secure input: drop the shared memory and let OMX allocate
                    mem.clear();

                    void *ptr = NULL;
                    sp<NativeHandle> native_handle;
                    err = mOMX->allocateSecureBuffer(
                            mNode, portIndex, bufSize, &info.mBufferID,
                            &ptr, &native_handle);

                    // TRICKY: this representation is unorthodox, but ACodec requires
                    // an ABuffer with a proper size to validate range offsets and lengths.
                    // Since mData is never referenced for secure input, it is used to store
                    // either the pointer to the secure buffer, or the opaque handle as on
                    // some devices ptr is actually an opaque handle, not a pointer.

                    // TRICKY2: use native handle as the base of the ABuffer if received one,
                    // because Widevine source only receives these base addresses.
                    const native_handle_t *native_handle_ptr =
                        native_handle == NULL ? NULL : native_handle->handle();
                    info.mData = new ABuffer(
                            ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize);
                    info.mNativeHandle = native_handle;
                    info.mCodecData = info.mData;
                } else if (mQuirks & requiresAllocateBufferBit) {
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID, allottedSize);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
                }

                if (mem != NULL) {
                    info.mCodecData = new ABuffer(mem->pointer(), bufSize);
                    info.mCodecRef = mem;

                    if (type == kMetadataBufferTypeANWBuffer) {
                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                    }

                    // if we require conversion, allocate conversion buffer for client use;
                    // otherwise, reuse codec buffer
                    if (mConverter[portIndex] != NULL) {
                        CHECK_GT(conversionBufferSize, (size_t)0);
                        mem = mDealer[portIndex]->allocate(conversionBufferSize);
                        if (mem == NULL|| mem->pointer() == NULL) {
                            return NO_MEMORY;
                        }
                        info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
                        info.mMemRef = mem;
                    } else {
                        info.mData = info.mCodecData;
                        info.mMemRef = info.mCodecRef;
                    }
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    // tell the client about the buffers we just allocated
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Configures the native window's geometry, color format, rotation and
// gralloc usage bits from the current OMX output port definition; stores the
// final usage bits in *finalUsage.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */,
        bool reconnect) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage,
            reconnect);
}

// Negotiates the output buffer count between the OMX component and the
// native window (accounting for the window's min-undequeued requirement) and
// applies the agreed count to both sides.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
            ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
            "output port",
            mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err =
mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1112 if (err != 0) { 1113 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1114 break; 1115 } 1116 1117 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1118 BufferInfo info; 1119 info.mStatus = BufferInfo::OWNED_BY_US; 1120 info.mFenceFd = fenceFd; 1121 info.mIsReadFence = false; 1122 info.mRenderInfo = NULL; 1123 info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */); 1124 info.mCodecData = info.mData; 1125 info.mGraphicBuffer = graphicBuffer; 1126 mBuffers[kPortIndexOutput].push(info); 1127 1128 IOMX::buffer_id bufferId; 1129 err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer, 1130 &bufferId); 1131 if (err != 0) { 1132 ALOGE("registering GraphicBuffer %u with OMX IL component failed: " 1133 "%d", i, err); 1134 break; 1135 } 1136 1137 mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId; 1138 1139 ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)", 1140 mComponentName.c_str(), 1141 bufferId, graphicBuffer.get()); 1142 } 1143 1144 OMX_U32 cancelStart; 1145 OMX_U32 cancelEnd; 1146 1147 if (err != 0) { 1148 // If an error occurred while dequeuing we need to cancel any buffers 1149 // that were dequeued. 1150 cancelStart = 0; 1151 cancelEnd = mBuffers[kPortIndexOutput].size(); 1152 } else { 1153 // Return the required minimum undequeued buffers to the native window. 
1154 cancelStart = bufferCount - minUndequeuedBuffers; 1155 cancelEnd = bufferCount; 1156 } 1157 1158 for (OMX_U32 i = cancelStart; i < cancelEnd; i++) { 1159 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1160 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1161 status_t error = cancelBufferToNativeWindow(info); 1162 if (err == 0) { 1163 err = error; 1164 } 1165 } 1166 } 1167 1168 if (!storingMetadataInDecodedBuffers()) { 1169 static_cast<Surface*>(mNativeWindow.get()) 1170 ->getIGraphicBufferProducer()->allowAllocation(false); 1171 } 1172 1173 return err; 1174} 1175 1176status_t ACodec::allocateOutputMetadataBuffers() { 1177 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1178 status_t err = configureOutputBuffersFromNativeWindow( 1179 &bufferCount, &bufferSize, &minUndequeuedBuffers, 1180 mLegacyAdaptiveExperiment /* preregister */); 1181 if (err != 0) 1182 return err; 1183 mNumUndequeuedBuffers = minUndequeuedBuffers; 1184 1185 ALOGV("[%s] Allocating %u meta buffers on output port", 1186 mComponentName.c_str(), bufferCount); 1187 1188 size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ? 
1189 sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata); 1190 size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment()); 1191 mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec"); 1192 1193 // Dequeue buffers and send them to OMX 1194 for (OMX_U32 i = 0; i < bufferCount; i++) { 1195 BufferInfo info; 1196 info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1197 info.mFenceFd = -1; 1198 info.mRenderInfo = NULL; 1199 info.mGraphicBuffer = NULL; 1200 info.mDequeuedAt = mDequeueCounter; 1201 1202 sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize); 1203 if (mem == NULL || mem->pointer() == NULL) { 1204 return NO_MEMORY; 1205 } 1206 if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1207 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 1208 } 1209 info.mData = new ABuffer(mem->pointer(), mem->size()); 1210 info.mMemRef = mem; 1211 info.mCodecData = info.mData; 1212 info.mCodecRef = mem; 1213 1214 // we use useBuffer for metadata regardless of quirks 1215 err = mOMX->useBuffer( 1216 mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size()); 1217 mBuffers[kPortIndexOutput].push(info); 1218 1219 ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)", 1220 mComponentName.c_str(), info.mBufferID, mem->pointer()); 1221 } 1222 1223 if (mLegacyAdaptiveExperiment) { 1224 // preallocate and preregister buffers 1225 static_cast<Surface *>(mNativeWindow.get()) 1226 ->getIGraphicBufferProducer()->allowAllocation(true); 1227 1228 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1229 "output port", 1230 mComponentName.c_str(), bufferCount, bufferSize); 1231 1232 // Dequeue buffers then cancel them all 1233 for (OMX_U32 i = 0; i < bufferCount; i++) { 1234 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1235 1236 ANativeWindowBuffer *buf; 1237 int fenceFd; 1238 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1239 if (err != 0) { 
1240 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1241 break; 1242 } 1243 1244 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1245 mOMX->updateGraphicBufferInMeta( 1246 mNode, kPortIndexOutput, graphicBuffer, info->mBufferID); 1247 info->mStatus = BufferInfo::OWNED_BY_US; 1248 info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy"); 1249 info->mGraphicBuffer = graphicBuffer; 1250 } 1251 1252 for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) { 1253 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1254 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1255 status_t error = cancelBufferToNativeWindow(info); 1256 if (err == OK) { 1257 err = error; 1258 } 1259 } 1260 } 1261 1262 static_cast<Surface*>(mNativeWindow.get()) 1263 ->getIGraphicBufferProducer()->allowAllocation(false); 1264 } 1265 1266 mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers; 1267 return err; 1268} 1269 1270status_t ACodec::submitOutputMetadataBuffer() { 1271 CHECK(storingMetadataInDecodedBuffers()); 1272 if (mMetadataBuffersToSubmit == 0) 1273 return OK; 1274 1275 BufferInfo *info = dequeueBufferFromNativeWindow(); 1276 if (info == NULL) { 1277 return ERROR_IO; 1278 } 1279 1280 ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p", 1281 mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get()); 1282 1283 --mMetadataBuffersToSubmit; 1284 info->checkWriteFence("submitOutputMetadataBuffer"); 1285 status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd); 1286 info->mFenceFd = -1; 1287 if (err == OK) { 1288 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 1289 } 1290 1291 return err; 1292} 1293 1294status_t ACodec::waitForFence(int fd, const char *dbg ) { 1295 status_t res = OK; 1296 if (fd >= 0) { 1297 sp<Fence> fence = new Fence(fd); 1298 res = fence->wait(IOMX::kFenceTimeoutMs); 1299 ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg); 1300 } 1301 return res; 1302} 
1303 1304// static 1305const char *ACodec::_asString(BufferInfo::Status s) { 1306 switch (s) { 1307 case BufferInfo::OWNED_BY_US: return "OUR"; 1308 case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT"; 1309 case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM"; 1310 case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM"; 1311 case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE"; 1312 case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED"; 1313 default: return "?"; 1314 } 1315} 1316 1317void ACodec::dumpBuffers(OMX_U32 portIndex) { 1318 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 1319 ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(), 1320 portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size()); 1321 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1322 const BufferInfo &info = mBuffers[portIndex][i]; 1323 ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u", 1324 i, info.mBufferID, info.mGraphicBuffer.get(), 1325 info.mGraphicBuffer == NULL ? 
NULL : info.mGraphicBuffer->getNativeBuffer(), 1326 _asString(info.mStatus), info.mStatus, info.mDequeuedAt); 1327 } 1328} 1329 1330status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { 1331 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 1332 1333 ALOGV("[%s] Calling cancelBuffer on buffer %u", 1334 mComponentName.c_str(), info->mBufferID); 1335 1336 info->checkWriteFence("cancelBufferToNativeWindow"); 1337 int err = mNativeWindow->cancelBuffer( 1338 mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 1339 info->mFenceFd = -1; 1340 1341 ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window", 1342 mComponentName.c_str(), info->mBufferID); 1343 // change ownership even if cancelBuffer fails 1344 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1345 1346 return err; 1347} 1348 1349void ACodec::updateRenderInfoForDequeuedBuffer( 1350 ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) { 1351 1352 info->mRenderInfo = 1353 mRenderTracker.updateInfoForDequeuedBuffer( 1354 buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]); 1355 1356 // check for any fences already signaled 1357 notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo); 1358} 1359 1360void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 1361 if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) { 1362 mRenderTracker.dumpRenderQueue(); 1363 } 1364} 1365 1366void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) { 1367 sp<AMessage> msg = mNotify->dup(); 1368 msg->setInt32("what", CodecBase::kWhatOutputFramesRendered); 1369 std::list<FrameRenderTracker::Info> done = 1370 mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete); 1371 1372 // unlink untracked frames 1373 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin(); 1374 it != done.cend(); ++it) { 1375 ssize_t index = it->getIndex(); 1376 if (index >= 0 && (size_t)index < 
mBuffers[kPortIndexOutput].size()) { 1377 mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL; 1378 } else if (index >= 0) { 1379 // THIS SHOULD NEVER HAPPEN 1380 ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size()); 1381 } 1382 } 1383 1384 if (MediaCodec::CreateFramesRenderedMessage(done, msg)) { 1385 msg->post(); 1386 } 1387} 1388 1389ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { 1390 ANativeWindowBuffer *buf; 1391 CHECK(mNativeWindow.get() != NULL); 1392 1393 if (mTunneled) { 1394 ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel" 1395 " video playback mode mode!"); 1396 return NULL; 1397 } 1398 1399 if (mFatalError) { 1400 ALOGW("not dequeuing from native window due to fatal error"); 1401 return NULL; 1402 } 1403 1404 int fenceFd = -1; 1405 do { 1406 status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1407 if (err != 0) { 1408 ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err); 1409 return NULL; 1410 } 1411 1412 bool stale = false; 1413 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1414 i--; 1415 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1416 1417 if (info->mGraphicBuffer != NULL && 1418 info->mGraphicBuffer->handle == buf->handle) { 1419 // Since consumers can attach buffers to BufferQueues, it is possible 1420 // that a known yet stale buffer can return from a surface that we 1421 // once used. We can simply ignore this as we have already dequeued 1422 // this buffer properly. NOTE: this does not eliminate all cases, 1423 // e.g. it is possible that we have queued the valid buffer to the 1424 // NW, and a stale copy of the same buffer gets dequeued - which will 1425 // be treated as the valid buffer by ACodec. 1426 if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 1427 ALOGI("dequeued stale buffer %p. 
discarding", buf); 1428 stale = true; 1429 break; 1430 } 1431 1432 ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer()); 1433 info->mStatus = BufferInfo::OWNED_BY_US; 1434 info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow"); 1435 updateRenderInfoForDequeuedBuffer(buf, fenceFd, info); 1436 return info; 1437 } 1438 } 1439 1440 // It is also possible to receive a previously unregistered buffer 1441 // in non-meta mode. These should be treated as stale buffers. The 1442 // same is possible in meta mode, in which case, it will be treated 1443 // as a normal buffer, which is not desirable. 1444 // TODO: fix this. 1445 if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) { 1446 ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf); 1447 stale = true; 1448 } 1449 if (stale) { 1450 // TODO: detach stale buffer, but there is no API yet to do it. 1451 buf = NULL; 1452 } 1453 } while (buf == NULL); 1454 1455 // get oldest undequeued buffer 1456 BufferInfo *oldest = NULL; 1457 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1458 i--; 1459 BufferInfo *info = 1460 &mBuffers[kPortIndexOutput].editItemAt(i); 1461 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW && 1462 (oldest == NULL || 1463 // avoid potential issues from counter rolling over 1464 mDequeueCounter - info->mDequeuedAt > 1465 mDequeueCounter - oldest->mDequeuedAt)) { 1466 oldest = info; 1467 } 1468 } 1469 1470 // it is impossible dequeue a buffer when there are no buffers with ANW 1471 CHECK(oldest != NULL); 1472 // it is impossible to dequeue an unknown buffer in non-meta mode, as the 1473 // while loop above does not complete 1474 CHECK(storingMetadataInDecodedBuffers()); 1475 1476 // discard buffer in LRU info and replace with new buffer 1477 oldest->mGraphicBuffer = new GraphicBuffer(buf, false); 1478 oldest->mStatus = BufferInfo::OWNED_BY_US; 1479 oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest"); 1480 
mRenderTracker.untrackFrame(oldest->mRenderInfo); 1481 oldest->mRenderInfo = NULL; 1482 1483 mOMX->updateGraphicBufferInMeta( 1484 mNode, kPortIndexOutput, oldest->mGraphicBuffer, 1485 oldest->mBufferID); 1486 1487 if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) { 1488 VideoGrallocMetadata *grallocMeta = 1489 reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base()); 1490 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1491 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1492 mDequeueCounter - oldest->mDequeuedAt, 1493 (void *)(uintptr_t)grallocMeta->pHandle, 1494 oldest->mGraphicBuffer->handle, oldest->mData->base()); 1495 } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1496 VideoNativeMetadata *nativeMeta = 1497 reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base()); 1498 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1499 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1500 mDequeueCounter - oldest->mDequeuedAt, 1501 (void *)(uintptr_t)nativeMeta->pBuffer, 1502 oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base()); 1503 } 1504 1505 updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest); 1506 return oldest; 1507} 1508 1509status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) { 1510 status_t err = OK; 1511 for (size_t i = mBuffers[portIndex].size(); i > 0;) { 1512 i--; 1513 status_t err2 = freeBuffer(portIndex, i); 1514 if (err == OK) { 1515 err = err2; 1516 } 1517 } 1518 1519 // clear mDealer even on an error 1520 mDealer[portIndex].clear(); 1521 return err; 1522} 1523 1524status_t ACodec::freeOutputBuffersNotOwnedByComponent() { 1525 status_t err = OK; 1526 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1527 i--; 1528 BufferInfo *info = 1529 &mBuffers[kPortIndexOutput].editItemAt(i); 1530 1531 // At this time some buffers may still be with the component 1532 // or being drained. 
1533 if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT && 1534 info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { 1535 status_t err2 = freeBuffer(kPortIndexOutput, i); 1536 if (err == OK) { 1537 err = err2; 1538 } 1539 } 1540 } 1541 1542 return err; 1543} 1544 1545status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) { 1546 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1547 status_t err = OK; 1548 1549 // there should not be any fences in the metadata 1550 MetadataBufferType type = 1551 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 1552 if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL 1553 && info->mData->size() >= sizeof(VideoNativeMetadata)) { 1554 int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd; 1555 if (fenceFd >= 0) { 1556 ALOGW("unreleased fence (%d) in %s metadata buffer %zu", 1557 fenceFd, portIndex == kPortIndexInput ? "input" : "output", i); 1558 } 1559 } 1560 1561 switch (info->mStatus) { 1562 case BufferInfo::OWNED_BY_US: 1563 if (portIndex == kPortIndexOutput && mNativeWindow != NULL) { 1564 (void)cancelBufferToNativeWindow(info); 1565 } 1566 // fall through 1567 1568 case BufferInfo::OWNED_BY_NATIVE_WINDOW: 1569 err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID); 1570 break; 1571 1572 default: 1573 ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus); 1574 err = FAILED_TRANSACTION; 1575 break; 1576 } 1577 1578 if (info->mFenceFd >= 0) { 1579 ::close(info->mFenceFd); 1580 } 1581 1582 if (portIndex == kPortIndexOutput) { 1583 mRenderTracker.untrackFrame(info->mRenderInfo, i); 1584 info->mRenderInfo = NULL; 1585 } 1586 1587 // remove buffer even if mOMX->freeBuffer fails 1588 mBuffers[portIndex].removeAt(i); 1589 return err; 1590} 1591 1592ACodec::BufferInfo *ACodec::findBufferByID( 1593 uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) { 1594 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1595 BufferInfo *info = 
&mBuffers[portIndex].editItemAt(i); 1596 1597 if (info->mBufferID == bufferID) { 1598 if (index != NULL) { 1599 *index = i; 1600 } 1601 return info; 1602 } 1603 } 1604 1605 ALOGE("Could not find buffer with ID %u", bufferID); 1606 return NULL; 1607} 1608 1609status_t ACodec::setComponentRole( 1610 bool isEncoder, const char *mime) { 1611 const char *role = getComponentRole(isEncoder, mime); 1612 if (role == NULL) { 1613 return BAD_VALUE; 1614 } 1615 status_t err = setComponentRole(mOMX, mNode, role); 1616 if (err != OK) { 1617 ALOGW("[%s] Failed to set standard component role '%s'.", 1618 mComponentName.c_str(), role); 1619 } 1620 return err; 1621} 1622 1623//static 1624const char *ACodec::getComponentRole( 1625 bool isEncoder, const char *mime) { 1626 struct MimeToRole { 1627 const char *mime; 1628 const char *decoderRole; 1629 const char *encoderRole; 1630 }; 1631 1632 static const MimeToRole kMimeToRole[] = { 1633 { MEDIA_MIMETYPE_AUDIO_MPEG, 1634 "audio_decoder.mp3", "audio_encoder.mp3" }, 1635 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I, 1636 "audio_decoder.mp1", "audio_encoder.mp1" }, 1637 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, 1638 "audio_decoder.mp2", "audio_encoder.mp2" }, 1639 { MEDIA_MIMETYPE_AUDIO_AMR_NB, 1640 "audio_decoder.amrnb", "audio_encoder.amrnb" }, 1641 { MEDIA_MIMETYPE_AUDIO_AMR_WB, 1642 "audio_decoder.amrwb", "audio_encoder.amrwb" }, 1643 { MEDIA_MIMETYPE_AUDIO_AAC, 1644 "audio_decoder.aac", "audio_encoder.aac" }, 1645 { MEDIA_MIMETYPE_AUDIO_VORBIS, 1646 "audio_decoder.vorbis", "audio_encoder.vorbis" }, 1647 { MEDIA_MIMETYPE_AUDIO_OPUS, 1648 "audio_decoder.opus", "audio_encoder.opus" }, 1649 { MEDIA_MIMETYPE_AUDIO_G711_MLAW, 1650 "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" }, 1651 { MEDIA_MIMETYPE_AUDIO_G711_ALAW, 1652 "audio_decoder.g711alaw", "audio_encoder.g711alaw" }, 1653 { MEDIA_MIMETYPE_VIDEO_AVC, 1654 "video_decoder.avc", "video_encoder.avc" }, 1655 { MEDIA_MIMETYPE_VIDEO_HEVC, 1656 "video_decoder.hevc", "video_encoder.hevc" }, 1657 { 
MEDIA_MIMETYPE_VIDEO_MPEG4, 1658 "video_decoder.mpeg4", "video_encoder.mpeg4" }, 1659 { MEDIA_MIMETYPE_VIDEO_H263, 1660 "video_decoder.h263", "video_encoder.h263" }, 1661 { MEDIA_MIMETYPE_VIDEO_VP8, 1662 "video_decoder.vp8", "video_encoder.vp8" }, 1663 { MEDIA_MIMETYPE_VIDEO_VP9, 1664 "video_decoder.vp9", "video_encoder.vp9" }, 1665 { MEDIA_MIMETYPE_AUDIO_RAW, 1666 "audio_decoder.raw", "audio_encoder.raw" }, 1667 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, 1668 "video_decoder.dolby-vision", "video_encoder.dolby-vision" }, 1669 { MEDIA_MIMETYPE_AUDIO_FLAC, 1670 "audio_decoder.flac", "audio_encoder.flac" }, 1671 { MEDIA_MIMETYPE_AUDIO_MSGSM, 1672 "audio_decoder.gsm", "audio_encoder.gsm" }, 1673 { MEDIA_MIMETYPE_VIDEO_MPEG2, 1674 "video_decoder.mpeg2", "video_encoder.mpeg2" }, 1675 { MEDIA_MIMETYPE_AUDIO_AC3, 1676 "audio_decoder.ac3", "audio_encoder.ac3" }, 1677 { MEDIA_MIMETYPE_AUDIO_EAC3, 1678 "audio_decoder.eac3", "audio_encoder.eac3" }, 1679 }; 1680 1681 static const size_t kNumMimeToRole = 1682 sizeof(kMimeToRole) / sizeof(kMimeToRole[0]); 1683 1684 size_t i; 1685 for (i = 0; i < kNumMimeToRole; ++i) { 1686 if (!strcasecmp(mime, kMimeToRole[i].mime)) { 1687 break; 1688 } 1689 } 1690 1691 if (i == kNumMimeToRole) { 1692 return NULL; 1693 } 1694 1695 return isEncoder ? 
kMimeToRole[i].encoderRole 1696 : kMimeToRole[i].decoderRole; 1697} 1698 1699//static 1700status_t ACodec::setComponentRole( 1701 const sp<IOMX> &omx, IOMX::node_id node, const char *role) { 1702 OMX_PARAM_COMPONENTROLETYPE roleParams; 1703 InitOMXParams(&roleParams); 1704 1705 strncpy((char *)roleParams.cRole, 1706 role, OMX_MAX_STRINGNAME_SIZE - 1); 1707 1708 roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0'; 1709 1710 return omx->setParameter( 1711 node, OMX_IndexParamStandardComponentRole, 1712 &roleParams, sizeof(roleParams)); 1713} 1714 1715status_t ACodec::configureCodec( 1716 const char *mime, const sp<AMessage> &msg) { 1717 int32_t encoder; 1718 if (!msg->findInt32("encoder", &encoder)) { 1719 encoder = false; 1720 } 1721 1722 sp<AMessage> inputFormat = new AMessage; 1723 sp<AMessage> outputFormat = new AMessage; 1724 mConfigFormat = msg; 1725 1726 mIsEncoder = encoder; 1727 1728 mInputMetadataType = kMetadataBufferTypeInvalid; 1729 mOutputMetadataType = kMetadataBufferTypeInvalid; 1730 1731 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1732 1733 if (err != OK) { 1734 return err; 1735 } 1736 1737 int32_t bitRate = 0; 1738 // FLAC encoder doesn't need a bitrate, other encoders do 1739 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1740 && !msg->findInt32("bitrate", &bitRate)) { 1741 return INVALID_OPERATION; 1742 } 1743 1744 // propagate bitrate to the output so that the muxer has it 1745 if (encoder && msg->findInt32("bitrate", &bitRate)) { 1746 // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the 1747 // average bitrate. We've been setting both bitrate and max-bitrate to this same value. 
1748 outputFormat->setInt32("bitrate", bitRate); 1749 outputFormat->setInt32("max-bitrate", bitRate); 1750 } 1751 1752 int32_t storeMeta; 1753 if (encoder 1754 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1755 && storeMeta != kMetadataBufferTypeInvalid) { 1756 mInputMetadataType = (MetadataBufferType)storeMeta; 1757 err = mOMX->storeMetaDataInBuffers( 1758 mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1759 if (err != OK) { 1760 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1761 mComponentName.c_str(), err); 1762 1763 return err; 1764 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1765 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1766 // IOMX translates ANWBuffers to gralloc source already. 1767 mInputMetadataType = (MetadataBufferType)storeMeta; 1768 } 1769 1770 uint32_t usageBits; 1771 if (mOMX->getParameter( 1772 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1773 &usageBits, sizeof(usageBits)) == OK) { 1774 inputFormat->setInt32( 1775 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1776 } 1777 } 1778 1779 int32_t prependSPSPPS = 0; 1780 if (encoder 1781 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1782 && prependSPSPPS != 0) { 1783 OMX_INDEXTYPE index; 1784 err = mOMX->getExtensionIndex( 1785 mNode, 1786 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1787 &index); 1788 1789 if (err == OK) { 1790 PrependSPSPPSToIDRFramesParams params; 1791 InitOMXParams(¶ms); 1792 params.bEnable = OMX_TRUE; 1793 1794 err = mOMX->setParameter( 1795 mNode, index, ¶ms, sizeof(params)); 1796 } 1797 1798 if (err != OK) { 1799 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1800 "IDR frames. 
(err %d)", err); 1801 1802 return err; 1803 } 1804 } 1805 1806 // Only enable metadata mode on encoder output if encoder can prepend 1807 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1808 // opaque handle, to which we don't have access. 1809 int32_t video = !strncasecmp(mime, "video/", 6); 1810 mIsVideo = video; 1811 if (encoder && video) { 1812 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1813 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1814 && storeMeta != 0); 1815 1816 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1817 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1818 if (err != OK) { 1819 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1820 mComponentName.c_str(), err); 1821 } 1822 1823 if (!msg->findInt64( 1824 "repeat-previous-frame-after", 1825 &mRepeatFrameDelayUs)) { 1826 mRepeatFrameDelayUs = -1ll; 1827 } 1828 1829 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1830 mMaxPtsGapUs = -1ll; 1831 } 1832 1833 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1834 mMaxFps = -1; 1835 } 1836 1837 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1838 mTimePerCaptureUs = -1ll; 1839 } 1840 1841 if (!msg->findInt32( 1842 "create-input-buffers-suspended", 1843 (int32_t*)&mCreateInputBuffersSuspended)) { 1844 mCreateInputBuffersSuspended = false; 1845 } 1846 } 1847 1848 // NOTE: we only use native window for video decoders 1849 sp<RefBase> obj; 1850 bool haveNativeWindow = msg->findObject("native-window", &obj) 1851 && obj != NULL && video && !encoder; 1852 mUsingNativeWindow = haveNativeWindow; 1853 mLegacyAdaptiveExperiment = false; 1854 if (video && !encoder) { 1855 inputFormat->setInt32("adaptive-playback", false); 1856 1857 int32_t usageProtected; 1858 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1859 if (!haveNativeWindow) { 1860 ALOGE("protected output buffers must be sent to an 
ANativeWindow"); 1861 return PERMISSION_DENIED; 1862 } 1863 mFlags |= kFlagIsGrallocUsageProtected; 1864 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1865 } 1866 1867 if (mFlags & kFlagIsSecure) { 1868 // use native_handles for secure input buffers 1869 err = mOMX->enableNativeBuffers( 1870 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1871 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1872 err = OK; // ignore error for now 1873 } 1874 } 1875 if (haveNativeWindow) { 1876 sp<ANativeWindow> nativeWindow = 1877 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1878 1879 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1880 int32_t autoFrc; 1881 if (msg->findInt32("auto-frc", &autoFrc)) { 1882 bool enabled = autoFrc; 1883 OMX_CONFIG_BOOLEANTYPE config; 1884 InitOMXParams(&config); 1885 config.bEnabled = (OMX_BOOL)enabled; 1886 status_t temp = mOMX->setConfig( 1887 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1888 &config, sizeof(config)); 1889 if (temp == OK) { 1890 outputFormat->setInt32("auto-frc", enabled); 1891 } else if (enabled) { 1892 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1893 } 1894 } 1895 // END of temporary support for automatic FRC 1896 1897 int32_t tunneled; 1898 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1899 tunneled != 0) { 1900 ALOGI("Configuring TUNNELED video playback."); 1901 mTunneled = true; 1902 1903 int32_t audioHwSync = 0; 1904 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1905 ALOGW("No Audio HW Sync provided for video tunnel"); 1906 } 1907 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1908 if (err != OK) { 1909 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1910 audioHwSync, nativeWindow.get()); 1911 return err; 1912 } 1913 1914 int32_t maxWidth = 0, maxHeight = 0; 1915 if (msg->findInt32("max-width", &maxWidth) && 1916 msg->findInt32("max-height", &maxHeight)) { 1917 
1918 err = mOMX->prepareForAdaptivePlayback( 1919 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1920 if (err != OK) { 1921 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1922 mComponentName.c_str(), err); 1923 // allow failure 1924 err = OK; 1925 } else { 1926 inputFormat->setInt32("max-width", maxWidth); 1927 inputFormat->setInt32("max-height", maxHeight); 1928 inputFormat->setInt32("adaptive-playback", true); 1929 } 1930 } 1931 } else { 1932 ALOGV("Configuring CPU controlled video playback."); 1933 mTunneled = false; 1934 1935 // Explicity reset the sideband handle of the window for 1936 // non-tunneled video in case the window was previously used 1937 // for a tunneled video playback. 1938 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1939 if (err != OK) { 1940 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1941 return err; 1942 } 1943 1944 // Always try to enable dynamic output buffers on native surface 1945 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1946 err = mOMX->storeMetaDataInBuffers( 1947 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1948 if (err != OK) { 1949 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1950 mComponentName.c_str(), err); 1951 1952 // if adaptive playback has been requested, try JB fallback 1953 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1954 // LARGE MEMORY REQUIREMENT 1955 1956 // we will not do adaptive playback on software accessed 1957 // surfaces as they never had to respond to changes in the 1958 // crop window, and we don't trust that they will be able to. 
1959 int usageBits = 0; 1960 bool canDoAdaptivePlayback; 1961 1962 if (nativeWindow->query( 1963 nativeWindow.get(), 1964 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1965 &usageBits) != OK) { 1966 canDoAdaptivePlayback = false; 1967 } else { 1968 canDoAdaptivePlayback = 1969 (usageBits & 1970 (GRALLOC_USAGE_SW_READ_MASK | 1971 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1972 } 1973 1974 int32_t maxWidth = 0, maxHeight = 0; 1975 if (canDoAdaptivePlayback && 1976 msg->findInt32("max-width", &maxWidth) && 1977 msg->findInt32("max-height", &maxHeight)) { 1978 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1979 mComponentName.c_str(), maxWidth, maxHeight); 1980 1981 err = mOMX->prepareForAdaptivePlayback( 1982 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1983 maxHeight); 1984 ALOGW_IF(err != OK, 1985 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1986 mComponentName.c_str(), err); 1987 1988 if (err == OK) { 1989 inputFormat->setInt32("max-width", maxWidth); 1990 inputFormat->setInt32("max-height", maxHeight); 1991 inputFormat->setInt32("adaptive-playback", true); 1992 } 1993 } 1994 // allow failure 1995 err = OK; 1996 } else { 1997 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1998 mComponentName.c_str()); 1999 CHECK(storingMetadataInDecodedBuffers()); 2000 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 2001 "legacy-adaptive", !msg->contains("no-experiments")); 2002 2003 inputFormat->setInt32("adaptive-playback", true); 2004 } 2005 2006 int32_t push; 2007 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 2008 && push != 0) { 2009 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 2010 } 2011 } 2012 2013 int32_t rotationDegrees; 2014 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 2015 mRotationDegrees = rotationDegrees; 2016 } else { 2017 mRotationDegrees = 0; 2018 } 2019 } 2020 2021 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 2022 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 2023 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 2024 2025 if (video) { 2026 // determine need for software renderer 2027 bool usingSwRenderer = false; 2028 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 2029 usingSwRenderer = true; 2030 haveNativeWindow = false; 2031 } 2032 2033 if (encoder) { 2034 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2035 } else { 2036 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2037 } 2038 2039 if (err != OK) { 2040 return err; 2041 } 2042 2043 if (haveNativeWindow) { 2044 mNativeWindow = static_cast<Surface *>(obj.get()); 2045 } 2046 2047 // initialize native window now to get actual output format 2048 // TODO: this is needed for some encoders even though they don't use native window 2049 err = initNativeWindow(); 2050 if (err != OK) { 2051 return err; 2052 } 2053 2054 // fallback for devices that do not handle flex-YUV for native buffers 2055 if (haveNativeWindow) { 2056 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2057 if (msg->findInt32("color-format", &requestedColorFormat) && 2058 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2059 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2060 if (err != OK) { 2061 return err; 2062 } 2063 int32_t colorFormat = OMX_COLOR_FormatUnused; 2064 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2065 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2066 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2067 return BAD_VALUE; 2068 } 2069 ALOGD("[%s] Requested output format %#x and got %#x.", 2070 mComponentName.c_str(), requestedColorFormat, colorFormat); 2071 if (!isFlexibleColorFormat( 2072 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2073 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2074 // device did not handle flex-YUV request for native window, fall back 2075 // to SW renderer 2076 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2077 mNativeWindow.clear(); 2078 mNativeWindowUsageBits = 0; 2079 haveNativeWindow = false; 2080 usingSwRenderer = true; 2081 if (storingMetadataInDecodedBuffers()) { 2082 err = mOMX->storeMetaDataInBuffers( 2083 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2084 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2085 // TODO: implement adaptive-playback support for bytebuffer mode. 2086 // This is done by SW codecs, but most HW codecs don't support it. 2087 inputFormat->setInt32("adaptive-playback", false); 2088 } 2089 if (err == OK) { 2090 err = mOMX->enableNativeBuffers( 2091 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2092 } 2093 if (mFlags & kFlagIsGrallocUsageProtected) { 2094 // fallback is not supported for protected playback 2095 err = PERMISSION_DENIED; 2096 } else if (err == OK) { 2097 err = setupVideoDecoder( 2098 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2099 } 2100 } 2101 } 2102 } 2103 2104 if (usingSwRenderer) { 2105 outputFormat->setInt32("using-sw-renderer", 1); 2106 } 2107 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2108 int32_t numChannels, sampleRate; 2109 if (!msg->findInt32("channel-count", &numChannels) 2110 || !msg->findInt32("sample-rate", &sampleRate)) { 2111 // Since we did not always check for these, leave them optional 2112 // and have the decoder figure it all out. 2113 err = OK; 2114 } else { 2115 err = setupRawAudioFormat( 2116 encoder ? 
kPortIndexInput : kPortIndexOutput, 2117 sampleRate, 2118 numChannels); 2119 } 2120 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2121 int32_t numChannels, sampleRate; 2122 if (!msg->findInt32("channel-count", &numChannels) 2123 || !msg->findInt32("sample-rate", &sampleRate)) { 2124 err = INVALID_OPERATION; 2125 } else { 2126 int32_t isADTS, aacProfile; 2127 int32_t sbrMode; 2128 int32_t maxOutputChannelCount; 2129 int32_t pcmLimiterEnable; 2130 drcParams_t drc; 2131 if (!msg->findInt32("is-adts", &isADTS)) { 2132 isADTS = 0; 2133 } 2134 if (!msg->findInt32("aac-profile", &aacProfile)) { 2135 aacProfile = OMX_AUDIO_AACObjectNull; 2136 } 2137 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2138 sbrMode = -1; 2139 } 2140 2141 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2142 maxOutputChannelCount = -1; 2143 } 2144 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2145 // value is unknown 2146 pcmLimiterEnable = -1; 2147 } 2148 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2149 // value is unknown 2150 drc.encodedTargetLevel = -1; 2151 } 2152 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2153 // value is unknown 2154 drc.drcCut = -1; 2155 } 2156 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2157 // value is unknown 2158 drc.drcBoost = -1; 2159 } 2160 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2161 // value is unknown 2162 drc.heavyCompression = -1; 2163 } 2164 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2165 // value is unknown 2166 drc.targetRefLevel = -1; 2167 } 2168 2169 err = setupAACCodec( 2170 encoder, numChannels, sampleRate, bitRate, aacProfile, 2171 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2172 pcmLimiterEnable); 2173 } 2174 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2175 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2176 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2177 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2178 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2179 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2180 // These are PCM-like formats with a fixed sample rate but 2181 // a variable number of channels. 2182 2183 int32_t numChannels; 2184 if (!msg->findInt32("channel-count", &numChannels)) { 2185 err = INVALID_OPERATION; 2186 } else { 2187 int32_t sampleRate; 2188 if (!msg->findInt32("sample-rate", &sampleRate)) { 2189 sampleRate = 8000; 2190 } 2191 err = setupG711Codec(encoder, sampleRate, numChannels); 2192 } 2193 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2194 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2195 if (encoder && 2196 (!msg->findInt32("channel-count", &numChannels) 2197 || !msg->findInt32("sample-rate", &sampleRate))) { 2198 ALOGE("missing channel count or sample rate for FLAC encoder"); 2199 err = INVALID_OPERATION; 2200 } else { 2201 if (encoder) { 2202 if (!msg->findInt32( 2203 "complexity", &compressionLevel) && 2204 !msg->findInt32( 2205 "flac-compression-level", &compressionLevel)) { 2206 compressionLevel = 5; // default FLAC compression level 2207 } else if (compressionLevel < 0) { 2208 ALOGW("compression level %d outside [0..8] range, " 2209 "using 0", 2210 compressionLevel); 2211 compressionLevel = 0; 2212 } else if (compressionLevel > 8) { 2213 ALOGW("compression level %d outside [0..8] range, " 2214 "using 8", 2215 compressionLevel); 2216 compressionLevel = 8; 2217 } 2218 } 2219 err = setupFlacCodec( 2220 encoder, numChannels, sampleRate, compressionLevel); 2221 } 2222 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2223 int32_t numChannels, sampleRate; 2224 if (encoder 2225 || !msg->findInt32("channel-count", &numChannels) 2226 || !msg->findInt32("sample-rate", &sampleRate)) { 2227 err = INVALID_OPERATION; 2228 } else { 2229 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2230 } 2231 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2232 int32_t numChannels; 2233 int32_t sampleRate; 2234 if (!msg->findInt32("channel-count", &numChannels) 2235 || !msg->findInt32("sample-rate", &sampleRate)) { 2236 err = INVALID_OPERATION; 2237 } else { 2238 err = setupAC3Codec(encoder, numChannels, sampleRate); 2239 } 2240 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2241 int32_t numChannels; 2242 int32_t sampleRate; 2243 if (!msg->findInt32("channel-count", &numChannels) 2244 || !msg->findInt32("sample-rate", &sampleRate)) { 2245 err = INVALID_OPERATION; 2246 } else { 2247 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2248 } 2249 } 2250 2251 if (err != OK) { 2252 return err; 2253 } 2254 2255 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2256 mEncoderDelay = 0; 2257 } 2258 2259 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2260 mEncoderPadding = 0; 2261 } 2262 2263 if (msg->findInt32("channel-mask", &mChannelMask)) { 2264 mChannelMaskPresent = true; 2265 } else { 2266 mChannelMaskPresent = false; 2267 } 2268 2269 int32_t maxInputSize; 2270 if (msg->findInt32("max-input-size", &maxInputSize)) { 2271 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2272 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2273 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2274 } 2275 2276 int32_t priority; 2277 if (msg->findInt32("priority", &priority)) { 2278 err = setPriority(priority); 2279 } 2280 2281 int32_t rateInt = -1; 2282 float rateFloat = -1; 2283 if (!msg->findFloat("operating-rate", &rateFloat)) { 2284 msg->findInt32("operating-rate", &rateInt); 2285 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
2286 } 2287 if (rateFloat > 0) { 2288 err = setOperatingRate(rateFloat, video); 2289 } 2290 2291 // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame. 2292 mBaseOutputFormat = outputFormat; 2293 // trigger a kWhatOutputFormatChanged msg on first buffer 2294 mLastOutputFormat.clear(); 2295 2296 err = getPortFormat(kPortIndexInput, inputFormat); 2297 if (err == OK) { 2298 err = getPortFormat(kPortIndexOutput, outputFormat); 2299 if (err == OK) { 2300 mInputFormat = inputFormat; 2301 mOutputFormat = outputFormat; 2302 } 2303 } 2304 2305 // create data converters if needed 2306 if (!video && err == OK) { 2307 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 2308 if (encoder) { 2309 (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2310 mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding); 2311 if (mConverter[kPortIndexInput] != NULL) { 2312 mInputFormat->setInt32("pcm-encoding", pcmEncoding); 2313 } 2314 } else { 2315 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2316 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 2317 if (mConverter[kPortIndexOutput] != NULL) { 2318 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 2319 } 2320 } 2321 } 2322 2323 return err; 2324} 2325 2326status_t ACodec::setPriority(int32_t priority) { 2327 if (priority < 0) { 2328 return BAD_VALUE; 2329 } 2330 OMX_PARAM_U32TYPE config; 2331 InitOMXParams(&config); 2332 config.nU32 = (OMX_U32)priority; 2333 status_t temp = mOMX->setConfig( 2334 mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2335 &config, sizeof(config)); 2336 if (temp != OK) { 2337 ALOGI("codec does not support config priority (err %d)", temp); 2338 } 2339 return OK; 2340} 2341 2342status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2343 if (rateFloat < 0) { 2344 return BAD_VALUE; 2345 } 2346 OMX_U32 rate; 2347 if (isVideo) { 2348 if (rateFloat > 
65535) { 2349 return BAD_VALUE; 2350 } 2351 rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f); 2352 } else { 2353 if (rateFloat > UINT_MAX) { 2354 return BAD_VALUE; 2355 } 2356 rate = (OMX_U32)(rateFloat); 2357 } 2358 OMX_PARAM_U32TYPE config; 2359 InitOMXParams(&config); 2360 config.nU32 = rate; 2361 status_t err = mOMX->setConfig( 2362 mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, 2363 &config, sizeof(config)); 2364 if (err != OK) { 2365 ALOGI("codec does not support config operating rate (err %d)", err); 2366 } 2367 return OK; 2368} 2369 2370status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) { 2371 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2372 InitOMXParams(¶ms); 2373 params.nPortIndex = kPortIndexOutput; 2374 status_t err = mOMX->getConfig( 2375 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2376 if (err == OK) { 2377 *intraRefreshPeriod = params.nRefreshPeriod; 2378 return OK; 2379 } 2380 2381 // Fallback to query through standard OMX index. 
2382 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2383 InitOMXParams(&refreshParams); 2384 refreshParams.nPortIndex = kPortIndexOutput; 2385 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2386 err = mOMX->getParameter( 2387 mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams)); 2388 if (err != OK || refreshParams.nCirMBs == 0) { 2389 *intraRefreshPeriod = 0; 2390 return OK; 2391 } 2392 2393 // Calculate period based on width and height 2394 uint32_t width, height; 2395 OMX_PARAM_PORTDEFINITIONTYPE def; 2396 InitOMXParams(&def); 2397 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2398 def.nPortIndex = kPortIndexOutput; 2399 err = mOMX->getParameter( 2400 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2401 if (err != OK) { 2402 *intraRefreshPeriod = 0; 2403 return err; 2404 } 2405 width = video_def->nFrameWidth; 2406 height = video_def->nFrameHeight; 2407 // Use H.264/AVC MacroBlock size 16x16 2408 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs); 2409 2410 return OK; 2411} 2412 2413status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) { 2414 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2415 InitOMXParams(¶ms); 2416 params.nPortIndex = kPortIndexOutput; 2417 params.nRefreshPeriod = intraRefreshPeriod; 2418 status_t err = mOMX->setConfig( 2419 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2420 if (err == OK) { 2421 return OK; 2422 } 2423 2424 // Only in configure state, a component could invoke setParameter. 
2425 if (!inConfigure) { 2426 return INVALID_OPERATION; 2427 } else { 2428 ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str()); 2429 } 2430 2431 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2432 InitOMXParams(&refreshParams); 2433 refreshParams.nPortIndex = kPortIndexOutput; 2434 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2435 2436 if (intraRefreshPeriod == 0) { 2437 // 0 means disable intra refresh. 2438 refreshParams.nCirMBs = 0; 2439 } else { 2440 // Calculate macroblocks that need to be intra coded base on width and height 2441 uint32_t width, height; 2442 OMX_PARAM_PORTDEFINITIONTYPE def; 2443 InitOMXParams(&def); 2444 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2445 def.nPortIndex = kPortIndexOutput; 2446 err = mOMX->getParameter( 2447 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2448 if (err != OK) { 2449 return err; 2450 } 2451 width = video_def->nFrameWidth; 2452 height = video_def->nFrameHeight; 2453 // Use H.264/AVC MacroBlock size 16x16 2454 refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod); 2455 } 2456 2457 err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh, 2458 &refreshParams, sizeof(refreshParams)); 2459 if (err != OK) { 2460 return err; 2461 } 2462 2463 return OK; 2464} 2465 2466status_t ACodec::configureTemporalLayers( 2467 const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) { 2468 if (!mIsVideo || !mIsEncoder) { 2469 return INVALID_OPERATION; 2470 } 2471 2472 AString tsSchema; 2473 if (!msg->findString("ts-schema", &tsSchema)) { 2474 return OK; 2475 } 2476 2477 unsigned int numLayers = 0; 2478 unsigned int numBLayers = 0; 2479 int tags; 2480 char dummy; 2481 OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern = 2482 OMX_VIDEO_AndroidTemporalLayeringPatternNone; 2483 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1 2484 && numLayers > 0) { 2485 pattern = 
OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC; 2486 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c", 2487 &numLayers, &dummy, &numBLayers, &dummy)) 2488 && (tags == 1 || (tags == 3 && dummy == '+')) 2489 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 2490 numLayers += numBLayers; 2491 pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid; 2492 } else { 2493 ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 2494 return BAD_VALUE; 2495 } 2496 2497 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams; 2498 InitOMXParams(&layerParams); 2499 layerParams.nPortIndex = kPortIndexOutput; 2500 2501 status_t err = mOMX->getParameter( 2502 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2503 &layerParams, sizeof(layerParams)); 2504 2505 if (err != OK) { 2506 return err; 2507 } else if (!(layerParams.eSupportedPatterns & pattern)) { 2508 return BAD_VALUE; 2509 } 2510 2511 numLayers = min(numLayers, layerParams.nLayerCountMax); 2512 numBLayers = min(numBLayers, layerParams.nBLayerCountMax); 2513 2514 if (!inConfigure) { 2515 OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig; 2516 InitOMXParams(&layerConfig); 2517 layerConfig.nPortIndex = kPortIndexOutput; 2518 layerConfig.ePattern = pattern; 2519 layerConfig.nPLayerCountActual = numLayers - numBLayers; 2520 layerConfig.nBLayerCountActual = numBLayers; 2521 layerConfig.bBitrateRatiosSpecified = OMX_FALSE; 2522 2523 err = mOMX->setConfig( 2524 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering, 2525 &layerConfig, sizeof(layerConfig)); 2526 } else { 2527 layerParams.ePattern = pattern; 2528 layerParams.nPLayerCountActual = numLayers - numBLayers; 2529 layerParams.nBLayerCountActual = numBLayers; 2530 layerParams.bBitrateRatiosSpecified = OMX_FALSE; 2531 2532 err = mOMX->setParameter( 2533 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2534 &layerParams, sizeof(layerParams)); 2535 } 2536 2537 AString configSchema; 
2538 if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) { 2539 configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers); 2540 } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) { 2541 configSchema = AStringPrintf("webrtc.vp8.%u", numLayers); 2542 } 2543 2544 if (err != OK) { 2545 ALOGW("Failed to set temporal layers to %s (requested %s)", 2546 configSchema.c_str(), tsSchema.c_str()); 2547 return err; 2548 } 2549 2550 err = mOMX->getParameter( 2551 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2552 &layerParams, sizeof(layerParams)); 2553 2554 if (err == OK) { 2555 ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)", 2556 tsSchema.c_str(), configSchema.c_str(), 2557 asString(layerParams.ePattern), layerParams.ePattern, 2558 layerParams.nPLayerCountActual, layerParams.nBLayerCountActual); 2559 2560 if (outputFormat.get() == mOutputFormat.get()) { 2561 mOutputFormat = mOutputFormat->dup(); // trigger an output format change event 2562 } 2563 // assume we got what we configured 2564 outputFormat->setString("ts-schema", configSchema); 2565 } 2566 return err; 2567} 2568 2569status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { 2570 OMX_PARAM_PORTDEFINITIONTYPE def; 2571 InitOMXParams(&def); 2572 def.nPortIndex = portIndex; 2573 2574 status_t err = mOMX->getParameter( 2575 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2576 2577 if (err != OK) { 2578 return err; 2579 } 2580 2581 if (def.nBufferSize >= size) { 2582 return OK; 2583 } 2584 2585 def.nBufferSize = size; 2586 2587 err = mOMX->setParameter( 2588 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2589 2590 if (err != OK) { 2591 return err; 2592 } 2593 2594 err = mOMX->getParameter( 2595 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2596 2597 if (err != OK) { 2598 return err; 2599 } 2600 2601 if (def.nBufferSize < size) { 2602 ALOGE("failed to set min buffer size to 
%zu (is still %u)", size, def.nBufferSize); 2603 return FAILED_TRANSACTION; 2604 } 2605 2606 return OK; 2607} 2608 2609status_t ACodec::selectAudioPortFormat( 2610 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 2611 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 2612 InitOMXParams(&format); 2613 2614 format.nPortIndex = portIndex; 2615 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2616 format.nIndex = index; 2617 status_t err = mOMX->getParameter( 2618 mNode, OMX_IndexParamAudioPortFormat, 2619 &format, sizeof(format)); 2620 2621 if (err != OK) { 2622 return err; 2623 } 2624 2625 if (format.eEncoding == desiredFormat) { 2626 break; 2627 } 2628 2629 if (index == kMaxIndicesToCheck) { 2630 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 2631 mComponentName.c_str(), index, 2632 asString(format.eEncoding), format.eEncoding); 2633 return ERROR_UNSUPPORTED; 2634 } 2635 } 2636 2637 return mOMX->setParameter( 2638 mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2639} 2640 2641status_t ACodec::setupAACCodec( 2642 bool encoder, int32_t numChannels, int32_t sampleRate, 2643 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 2644 int32_t maxOutputChannelCount, const drcParams_t& drc, 2645 int32_t pcmLimiterEnable) { 2646 if (encoder && isADTS) { 2647 return -EINVAL; 2648 } 2649 2650 status_t err = setupRawAudioFormat( 2651 encoder ? 
// Configures the component for AAC encode or decode.
// Encoder path: PCM on the input port, AAC (MP4FF) on the output port, with
// profile/SBR tooling applied. Decoder path: AAC (ADTS or MP4FF) on the input
// port, PCM on the output port, with optional DRC/presentation parameters.
// ADTS output from an encoder is not supported.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        // Encoders only emit MP4FF framing here.
        return -EINVAL;
    }

    // The PCM side: input port when encoding, output port when decoding.
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // sbrMode: 0 = off, 1 = single-rate, 2 = dual-rate, -1 = codec decides.
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // Decoder path: configure the compressed (input) port.
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // DRC/presentation values of -1 mean "unknown; leave to component default".
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    mSampleRate = sampleRate;
    return res;
}
OMX_AUDIO_AACStreamFormatMP4ADTS 2766 : OMX_AUDIO_AACStreamFormatMP4FF; 2767 2768 OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation; 2769 InitOMXParams(&presentation); 2770 presentation.nMaxOutputChannels = maxOutputChannelCount; 2771 presentation.nDrcCut = drc.drcCut; 2772 presentation.nDrcBoost = drc.drcBoost; 2773 presentation.nHeavyCompression = drc.heavyCompression; 2774 presentation.nTargetReferenceLevel = drc.targetRefLevel; 2775 presentation.nEncodedTargetLevel = drc.encodedTargetLevel; 2776 presentation.nPCMLimiterEnable = pcmLimiterEnable; 2777 2778 status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2779 if (res == OK) { 2780 // optional parameters, will not cause configuration failure 2781 mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation, 2782 &presentation, sizeof(presentation)); 2783 } else { 2784 ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res); 2785 } 2786 mSampleRate = sampleRate; 2787 return res; 2788} 2789 2790status_t ACodec::setupAC3Codec( 2791 bool encoder, int32_t numChannels, int32_t sampleRate) { 2792 status_t err = setupRawAudioFormat( 2793 encoder ? 
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2794 2795 if (err != OK) { 2796 return err; 2797 } 2798 2799 if (encoder) { 2800 ALOGW("AC3 encoding is not supported."); 2801 return INVALID_OPERATION; 2802 } 2803 2804 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2805 InitOMXParams(&def); 2806 def.nPortIndex = kPortIndexInput; 2807 2808 err = mOMX->getParameter( 2809 mNode, 2810 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2811 &def, 2812 sizeof(def)); 2813 2814 if (err != OK) { 2815 return err; 2816 } 2817 2818 def.nChannels = numChannels; 2819 def.nSampleRate = sampleRate; 2820 2821 return mOMX->setParameter( 2822 mNode, 2823 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2824 &def, 2825 sizeof(def)); 2826} 2827 2828status_t ACodec::setupEAC3Codec( 2829 bool encoder, int32_t numChannels, int32_t sampleRate) { 2830 status_t err = setupRawAudioFormat( 2831 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2832 2833 if (err != OK) { 2834 return err; 2835 } 2836 2837 if (encoder) { 2838 ALOGW("EAC3 encoding is not supported."); 2839 return INVALID_OPERATION; 2840 } 2841 2842 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2843 InitOMXParams(&def); 2844 def.nPortIndex = kPortIndexInput; 2845 2846 err = mOMX->getParameter( 2847 mNode, 2848 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2849 &def, 2850 sizeof(def)); 2851 2852 if (err != OK) { 2853 return err; 2854 } 2855 2856 def.nChannels = numChannels; 2857 def.nSampleRate = sampleRate; 2858 2859 return mOMX->setParameter( 2860 mNode, 2861 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2862 &def, 2863 sizeof(def)); 2864} 2865 2866static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2867 bool isAMRWB, int32_t bps) { 2868 if (isAMRWB) { 2869 if (bps <= 6600) { 2870 return OMX_AUDIO_AMRBandModeWB0; 2871 } else if (bps <= 8850) { 2872 return OMX_AUDIO_AMRBandModeWB1; 2873 } else if (bps <= 12650) { 2874 return OMX_AUDIO_AMRBandModeWB2; 2875 } else if (bps <= 14250) { 2876 return OMX_AUDIO_AMRBandModeWB3; 
2877 } else if (bps <= 15850) { 2878 return OMX_AUDIO_AMRBandModeWB4; 2879 } else if (bps <= 18250) { 2880 return OMX_AUDIO_AMRBandModeWB5; 2881 } else if (bps <= 19850) { 2882 return OMX_AUDIO_AMRBandModeWB6; 2883 } else if (bps <= 23050) { 2884 return OMX_AUDIO_AMRBandModeWB7; 2885 } 2886 2887 // 23850 bps 2888 return OMX_AUDIO_AMRBandModeWB8; 2889 } else { // AMRNB 2890 if (bps <= 4750) { 2891 return OMX_AUDIO_AMRBandModeNB0; 2892 } else if (bps <= 5150) { 2893 return OMX_AUDIO_AMRBandModeNB1; 2894 } else if (bps <= 5900) { 2895 return OMX_AUDIO_AMRBandModeNB2; 2896 } else if (bps <= 6700) { 2897 return OMX_AUDIO_AMRBandModeNB3; 2898 } else if (bps <= 7400) { 2899 return OMX_AUDIO_AMRBandModeNB4; 2900 } else if (bps <= 7950) { 2901 return OMX_AUDIO_AMRBandModeNB5; 2902 } else if (bps <= 10200) { 2903 return OMX_AUDIO_AMRBandModeNB6; 2904 } 2905 2906 // 12200 bps 2907 return OMX_AUDIO_AMRBandModeNB7; 2908 } 2909} 2910 2911status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2912 OMX_AUDIO_PARAM_AMRTYPE def; 2913 InitOMXParams(&def); 2914 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2915 2916 status_t err = 2917 mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2918 2919 if (err != OK) { 2920 return err; 2921 } 2922 2923 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2924 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2925 2926 err = mOMX->setParameter( 2927 mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2928 2929 if (err != OK) { 2930 return err; 2931 } 2932 2933 return setupRawAudioFormat( 2934 encoder ? kPortIndexInput : kPortIndexOutput, 2935 isWAMR ? 
16000 : 8000 /* sampleRate */, 2936 1 /* numChannels */); 2937} 2938 2939status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2940 if (encoder) { 2941 return INVALID_OPERATION; 2942 } 2943 2944 return setupRawAudioFormat( 2945 kPortIndexInput, sampleRate, numChannels); 2946} 2947 2948status_t ACodec::setupFlacCodec( 2949 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2950 2951 if (encoder) { 2952 OMX_AUDIO_PARAM_FLACTYPE def; 2953 InitOMXParams(&def); 2954 def.nPortIndex = kPortIndexOutput; 2955 2956 // configure compression level 2957 status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2958 if (err != OK) { 2959 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2960 return err; 2961 } 2962 def.nCompressionLevel = compressionLevel; 2963 err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2964 if (err != OK) { 2965 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2966 return err; 2967 } 2968 } 2969 2970 return setupRawAudioFormat( 2971 encoder ? 
kPortIndexInput : kPortIndexOutput, 2972 sampleRate, 2973 numChannels); 2974} 2975 2976status_t ACodec::setupRawAudioFormat( 2977 OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) { 2978 OMX_PARAM_PORTDEFINITIONTYPE def; 2979 InitOMXParams(&def); 2980 def.nPortIndex = portIndex; 2981 2982 status_t err = mOMX->getParameter( 2983 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2984 2985 if (err != OK) { 2986 return err; 2987 } 2988 2989 def.format.audio.eEncoding = OMX_AUDIO_CodingPCM; 2990 2991 err = mOMX->setParameter( 2992 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2993 2994 if (err != OK) { 2995 return err; 2996 } 2997 2998 OMX_AUDIO_PARAM_PCMMODETYPE pcmParams; 2999 InitOMXParams(&pcmParams); 3000 pcmParams.nPortIndex = portIndex; 3001 3002 err = mOMX->getParameter( 3003 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 3004 3005 if (err != OK) { 3006 return err; 3007 } 3008 3009 pcmParams.nChannels = numChannels; 3010 switch (encoding) { 3011 case kAudioEncodingPcm8bit: 3012 pcmParams.eNumData = OMX_NumericalDataUnsigned; 3013 pcmParams.nBitPerSample = 8; 3014 break; 3015 case kAudioEncodingPcmFloat: 3016 pcmParams.eNumData = OMX_NumericalDataFloat; 3017 pcmParams.nBitPerSample = 32; 3018 break; 3019 case kAudioEncodingPcm16bit: 3020 pcmParams.eNumData = OMX_NumericalDataSigned; 3021 pcmParams.nBitPerSample = 16; 3022 break; 3023 default: 3024 return BAD_VALUE; 3025 } 3026 pcmParams.bInterleaved = OMX_TRUE; 3027 pcmParams.nSamplingRate = sampleRate; 3028 pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear; 3029 3030 if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) { 3031 return OMX_ErrorNone; 3032 } 3033 3034 err = mOMX->setParameter( 3035 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 3036 // if we could not set up raw format to non-16-bit, try with 16-bit 3037 // NOTE: we will also verify this via readback, in case codec ignores these fields 3038 if (err != 
OK && encoding != kAudioEncodingPcm16bit) { 3039 pcmParams.eNumData = OMX_NumericalDataSigned; 3040 pcmParams.nBitPerSample = 16; 3041 err = mOMX->setParameter( 3042 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 3043 } 3044 return err; 3045} 3046 3047status_t ACodec::configureTunneledVideoPlayback( 3048 int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) { 3049 native_handle_t* sidebandHandle; 3050 3051 status_t err = mOMX->configureVideoTunnelMode( 3052 mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle); 3053 if (err != OK) { 3054 ALOGE("configureVideoTunnelMode failed! (err %d).", err); 3055 return err; 3056 } 3057 3058 err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle); 3059 if (err != OK) { 3060 ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).", 3061 sidebandHandle, err); 3062 return err; 3063 } 3064 3065 return OK; 3066} 3067 3068status_t ACodec::setVideoPortFormatType( 3069 OMX_U32 portIndex, 3070 OMX_VIDEO_CODINGTYPE compressionFormat, 3071 OMX_COLOR_FORMATTYPE colorFormat, 3072 bool usingNativeBuffers) { 3073 OMX_VIDEO_PARAM_PORTFORMATTYPE format; 3074 InitOMXParams(&format); 3075 format.nPortIndex = portIndex; 3076 format.nIndex = 0; 3077 bool found = false; 3078 3079 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 3080 format.nIndex = index; 3081 status_t err = mOMX->getParameter( 3082 mNode, OMX_IndexParamVideoPortFormat, 3083 &format, sizeof(format)); 3084 3085 if (err != OK) { 3086 return err; 3087 } 3088 3089 // substitute back flexible color format to codec supported format 3090 OMX_U32 flexibleEquivalent; 3091 if (compressionFormat == OMX_VIDEO_CodingUnused 3092 && isFlexibleColorFormat( 3093 mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent) 3094 && colorFormat == flexibleEquivalent) { 3095 ALOGI("[%s] using color format %#x in place of %#x", 3096 mComponentName.c_str(), format.eColorFormat, colorFormat); 3097 colorFormat = 
format.eColorFormat; 3098 } 3099 3100 // The following assertion is violated by TI's video decoder. 3101 // CHECK_EQ(format.nIndex, index); 3102 3103 if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) { 3104 if (portIndex == kPortIndexInput 3105 && colorFormat == format.eColorFormat) { 3106 // eCompressionFormat does not seem right. 3107 found = true; 3108 break; 3109 } 3110 if (portIndex == kPortIndexOutput 3111 && compressionFormat == format.eCompressionFormat) { 3112 // eColorFormat does not seem right. 3113 found = true; 3114 break; 3115 } 3116 } 3117 3118 if (format.eCompressionFormat == compressionFormat 3119 && format.eColorFormat == colorFormat) { 3120 found = true; 3121 break; 3122 } 3123 3124 if (index == kMaxIndicesToCheck) { 3125 ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)", 3126 mComponentName.c_str(), index, 3127 asString(format.eCompressionFormat), format.eCompressionFormat, 3128 asString(format.eColorFormat), format.eColorFormat); 3129 } 3130 } 3131 3132 if (!found) { 3133 return UNKNOWN_ERROR; 3134 } 3135 3136 status_t err = mOMX->setParameter( 3137 mNode, OMX_IndexParamVideoPortFormat, 3138 &format, sizeof(format)); 3139 3140 return err; 3141} 3142 3143// Set optimal output format. OMX component lists output formats in the order 3144// of preference, but this got more complicated since the introduction of flexible 3145// YUV formats. We support a legacy behavior for applications that do not use 3146// surface output, do not specify an output format, but expect a "usable" standard 3147// OMX format. SW readable and standard formats must be flex-YUV. 
3148// 3149// Suggested preference order: 3150// - optimal format for texture rendering (mediaplayer behavior) 3151// - optimal SW readable & texture renderable format (flex-YUV support) 3152// - optimal SW readable non-renderable format (flex-YUV bytebuffer support) 3153// - legacy "usable" standard formats 3154// 3155// For legacy support, we prefer a standard format, but will settle for a SW readable 3156// flex-YUV format. 3157status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) { 3158 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat; 3159 InitOMXParams(&format); 3160 format.nPortIndex = kPortIndexOutput; 3161 3162 InitOMXParams(&legacyFormat); 3163 // this field will change when we find a suitable legacy format 3164 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused; 3165 3166 for (OMX_U32 index = 0; ; ++index) { 3167 format.nIndex = index; 3168 status_t err = mOMX->getParameter( 3169 mNode, OMX_IndexParamVideoPortFormat, 3170 &format, sizeof(format)); 3171 if (err != OK) { 3172 // no more formats, pick legacy format if found 3173 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) { 3174 memcpy(&format, &legacyFormat, sizeof(format)); 3175 break; 3176 } 3177 return err; 3178 } 3179 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) { 3180 return OMX_ErrorBadParameter; 3181 } 3182 if (!getLegacyFlexibleFormat) { 3183 break; 3184 } 3185 // standard formats that were exposed to users before 3186 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar 3187 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar 3188 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar 3189 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar 3190 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) { 3191 break; 3192 } 3193 // find best legacy non-standard format 3194 OMX_U32 flexibleEquivalent; 3195 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused 3196 && isFlexibleColorFormat( 3197 mOMX, mNode, 
format.eColorFormat, false /* usingNativeBuffers */, 3198 &flexibleEquivalent) 3199 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 3200 memcpy(&legacyFormat, &format, sizeof(format)); 3201 } 3202 } 3203 return mOMX->setParameter( 3204 mNode, OMX_IndexParamVideoPortFormat, 3205 &format, sizeof(format)); 3206} 3207 3208static const struct VideoCodingMapEntry { 3209 const char *mMime; 3210 OMX_VIDEO_CODINGTYPE mVideoCodingType; 3211} kVideoCodingMapEntry[] = { 3212 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 3213 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 3214 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 3215 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 3216 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 3217 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 3218 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 3219 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 3220}; 3221 3222static status_t GetVideoCodingTypeFromMime( 3223 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 3224 for (size_t i = 0; 3225 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3226 ++i) { 3227 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3228 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3229 return OK; 3230 } 3231 } 3232 3233 *codingType = OMX_VIDEO_CodingUnused; 3234 3235 return ERROR_UNSUPPORTED; 3236} 3237 3238static status_t GetMimeTypeForVideoCoding( 3239 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3240 for (size_t i = 0; 3241 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3242 ++i) { 3243 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3244 *mime = kVideoCodingMapEntry[i].mMime; 3245 return OK; 3246 } 3247 } 3248 3249 mime->clear(); 3250 3251 return ERROR_UNSUPPORTED; 3252} 3253 3254status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) { 3255 OMX_PARAM_PORTDEFINITIONTYPE def; 3256 InitOMXParams(&def); 3257 
def.nPortIndex = portIndex; 3258 status_t err; 3259 ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(), 3260 portIndex == kPortIndexInput ? "input" : "output", bufferNum); 3261 err = mOMX->getParameter( 3262 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3263 if (err != OK) { 3264 return err; 3265 } 3266 def.nBufferCountActual = bufferNum; 3267 err = mOMX->setParameter( 3268 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3269 if (err != OK) { 3270 // Component could reject this request. 3271 ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(), 3272 portIndex == kPortIndexInput ? "input" : "output", bufferNum); 3273 } 3274 return OK; 3275} 3276 3277status_t ACodec::setupVideoDecoder( 3278 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow, 3279 bool usingSwRenderer, sp<AMessage> &outputFormat) { 3280 int32_t width, height; 3281 if (!msg->findInt32("width", &width) 3282 || !msg->findInt32("height", &height)) { 3283 return INVALID_OPERATION; 3284 } 3285 3286 OMX_VIDEO_CODINGTYPE compressionFormat; 3287 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3288 3289 if (err != OK) { 3290 return err; 3291 } 3292 3293 if (compressionFormat == OMX_VIDEO_CodingVP9) { 3294 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 3295 InitOMXParams(¶ms); 3296 params.nPortIndex = kPortIndexInput; 3297 // Check if VP9 decoder advertises supported profiles. 
3298 params.nProfileIndex = 0; 3299 status_t err = mOMX->getParameter( 3300 mNode, 3301 OMX_IndexParamVideoProfileLevelQuerySupported, 3302 ¶ms, 3303 sizeof(params)); 3304 mIsLegacyVP9Decoder = err != OK; 3305 } 3306 3307 err = setVideoPortFormatType( 3308 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 3309 3310 if (err != OK) { 3311 return err; 3312 } 3313 3314 int32_t tmp; 3315 if (msg->findInt32("color-format", &tmp)) { 3316 OMX_COLOR_FORMATTYPE colorFormat = 3317 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3318 err = setVideoPortFormatType( 3319 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3320 if (err != OK) { 3321 ALOGW("[%s] does not support color format %d", 3322 mComponentName.c_str(), colorFormat); 3323 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3324 } 3325 } else { 3326 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3327 } 3328 3329 if (err != OK) { 3330 return err; 3331 } 3332 3333 // Set the component input buffer number to be |tmp|. If succeed, 3334 // component will set input port buffer number to be |tmp|. If fail, 3335 // component will keep the same buffer number as before. 3336 if (msg->findInt32("android._num-input-buffers", &tmp)) { 3337 err = setPortBufferNum(kPortIndexInput, tmp); 3338 if (err != OK) 3339 return err; 3340 } 3341 3342 // Set the component output buffer number to be |tmp|. If succeed, 3343 // component will set output port buffer number to be |tmp|. If fail, 3344 // component will keep the same buffer number as before. 
3345 if (msg->findInt32("android._num-output-buffers", &tmp)) { 3346 err = setPortBufferNum(kPortIndexOutput, tmp); 3347 if (err != OK) 3348 return err; 3349 } 3350 3351 int32_t frameRateInt; 3352 float frameRateFloat; 3353 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 3354 if (!msg->findInt32("frame-rate", &frameRateInt)) { 3355 frameRateInt = -1; 3356 } 3357 frameRateFloat = (float)frameRateInt; 3358 } 3359 3360 err = setVideoFormatOnPort( 3361 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 3362 3363 if (err != OK) { 3364 return err; 3365 } 3366 3367 err = setVideoFormatOnPort( 3368 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 3369 3370 if (err != OK) { 3371 return err; 3372 } 3373 3374 err = setColorAspectsForVideoDecoder( 3375 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat); 3376 if (err == ERROR_UNSUPPORTED) { // support is optional 3377 err = OK; 3378 } 3379 3380 if (err != OK) { 3381 return err; 3382 } 3383 3384 err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat); 3385 if (err == ERROR_UNSUPPORTED) { // support is optional 3386 err = OK; 3387 } 3388 return err; 3389} 3390 3391status_t ACodec::initDescribeColorAspectsIndex() { 3392 status_t err = mOMX->getExtensionIndex( 3393 mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex); 3394 if (err != OK) { 3395 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0; 3396 } 3397 return err; 3398} 3399 3400status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) { 3401 status_t err = ERROR_UNSUPPORTED; 3402 if (mDescribeColorAspectsIndex) { 3403 err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3404 } 3405 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3406 mComponentName.c_str(), 3407 params.sAspects.mRange, asString(params.sAspects.mRange), 3408 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3409 
params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3410 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3411 err, asString(err)); 3412 3413 if (verify && err == OK) { 3414 err = getCodecColorAspects(params); 3415 } 3416 3417 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3418 "[%s] setting color aspects failed even though codec advertises support", 3419 mComponentName.c_str()); 3420 return err; 3421} 3422 3423status_t ACodec::setColorAspectsForVideoDecoder( 3424 int32_t width, int32_t height, bool usingNativeWindow, 3425 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3426 DescribeColorAspectsParams params; 3427 InitOMXParams(¶ms); 3428 params.nPortIndex = kPortIndexOutput; 3429 3430 getColorAspectsFromFormat(configFormat, params.sAspects); 3431 if (usingNativeWindow) { 3432 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3433 // The default aspects will be set back to the output format during the 3434 // getFormat phase of configure(). Set non-Unspecified values back into the 3435 // format, in case component does not support this enumeration. 
3436 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3437 } 3438 3439 (void)initDescribeColorAspectsIndex(); 3440 3441 // communicate color aspects to codec 3442 return setCodecColorAspects(params); 3443} 3444 3445status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams ¶ms) { 3446 status_t err = ERROR_UNSUPPORTED; 3447 if (mDescribeColorAspectsIndex) { 3448 err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3449 } 3450 ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3451 mComponentName.c_str(), 3452 params.sAspects.mRange, asString(params.sAspects.mRange), 3453 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3454 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3455 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3456 err, asString(err)); 3457 if (params.bRequestingDataSpace) { 3458 ALOGV("for dataspace %#x", params.nDataSpace); 3459 } 3460 if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex 3461 && !params.bRequestingDataSpace && !params.bDataSpaceChanged) { 3462 ALOGW("[%s] getting color aspects failed even though codec advertises support", 3463 mComponentName.c_str()); 3464 } 3465 return err; 3466} 3467 3468status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) { 3469 DescribeColorAspectsParams params; 3470 InitOMXParams(¶ms); 3471 params.nPortIndex = kPortIndexInput; 3472 status_t err = getCodecColorAspects(params); 3473 if (err == OK) { 3474 // we only set encoder input aspects if codec supports them 3475 setColorAspectsIntoFormat(params.sAspects, format, true /* force */); 3476 } 3477 return err; 3478} 3479 3480status_t ACodec::getDataSpace( 3481 DescribeColorAspectsParams ¶ms, android_dataspace *dataSpace /* nonnull */, 3482 bool tryCodec) { 3483 status_t err = OK; 3484 if (tryCodec) { 3485 // request dataspace guidance from codec. 
3486 params.bRequestingDataSpace = OMX_TRUE; 3487 err = getCodecColorAspects(params); 3488 params.bRequestingDataSpace = OMX_FALSE; 3489 if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) { 3490 *dataSpace = (android_dataspace)params.nDataSpace; 3491 return err; 3492 } else if (err == ERROR_UNSUPPORTED) { 3493 // ignore not-implemented error for dataspace requests 3494 err = OK; 3495 } 3496 } 3497 3498 // this returns legacy versions if available 3499 *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */); 3500 ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 3501 "and dataspace %#x", 3502 mComponentName.c_str(), 3503 params.sAspects.mRange, asString(params.sAspects.mRange), 3504 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3505 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3506 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3507 *dataSpace); 3508 return err; 3509} 3510 3511 3512status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder( 3513 int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, 3514 android_dataspace *dataSpace) { 3515 DescribeColorAspectsParams params; 3516 InitOMXParams(¶ms); 3517 params.nPortIndex = kPortIndexOutput; 3518 3519 // reset default format and get resulting format 3520 getColorAspectsFromFormat(configFormat, params.sAspects); 3521 if (dataSpace != NULL) { 3522 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3523 } 3524 status_t err = setCodecColorAspects(params, true /* readBack */); 3525 3526 // we always set specified aspects for decoders 3527 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3528 3529 if (dataSpace != NULL) { 3530 status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */); 3531 if (err == OK) { 3532 err = res; 3533 } 3534 } 3535 3536 return err; 3537} 3538 3539// initial video encoder setup for bytebuffer mode 
3540status_t ACodec::setColorAspectsForVideoEncoder( 3541 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3542 // copy config to output format as this is not exposed via getFormat 3543 copyColorConfig(configFormat, outputFormat); 3544 3545 DescribeColorAspectsParams params; 3546 InitOMXParams(¶ms); 3547 params.nPortIndex = kPortIndexInput; 3548 getColorAspectsFromFormat(configFormat, params.sAspects); 3549 3550 (void)initDescribeColorAspectsIndex(); 3551 3552 int32_t usingRecorder; 3553 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3554 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3555 int32_t width, height; 3556 if (configFormat->findInt32("width", &width) 3557 && configFormat->findInt32("height", &height)) { 3558 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3559 status_t err = getDataSpace( 3560 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3561 if (err != OK) { 3562 return err; 3563 } 3564 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3565 } 3566 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3567 } 3568 3569 // communicate color aspects to codec, but do not allow change of the platform aspects 3570 ColorAspects origAspects = params.sAspects; 3571 for (int triesLeft = 2; --triesLeft >= 0; ) { 3572 status_t err = setCodecColorAspects(params, true /* readBack */); 3573 if (err != OK 3574 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3575 params.sAspects, origAspects, true /* usePlatformAspects */)) { 3576 return err; 3577 } 3578 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3579 mComponentName.c_str()); 3580 } 3581 return OK; 3582} 3583 3584status_t ACodec::setHDRStaticInfoForVideoCodec( 3585 OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3586 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3587 3588 
DescribeHDRStaticInfoParams params; 3589 InitOMXParams(¶ms); 3590 params.nPortIndex = portIndex; 3591 3592 HDRStaticInfo *info = ¶ms.sInfo; 3593 if (getHDRStaticInfoFromFormat(configFormat, info)) { 3594 setHDRStaticInfoIntoFormat(params.sInfo, outputFormat); 3595 } 3596 3597 (void)initDescribeHDRStaticInfoIndex(); 3598 3599 // communicate HDR static Info to codec 3600 return setHDRStaticInfo(params); 3601} 3602 3603// subsequent initial video encoder setup for surface mode 3604status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3605 android_dataspace *dataSpace /* nonnull */) { 3606 DescribeColorAspectsParams params; 3607 InitOMXParams(¶ms); 3608 params.nPortIndex = kPortIndexInput; 3609 ColorAspects &aspects = params.sAspects; 3610 3611 // reset default format and store resulting format into both input and output formats 3612 getColorAspectsFromFormat(mConfigFormat, aspects); 3613 int32_t width, height; 3614 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3615 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3616 } 3617 setColorAspectsIntoFormat(aspects, mInputFormat); 3618 setColorAspectsIntoFormat(aspects, mOutputFormat); 3619 3620 // communicate color aspects to codec, but do not allow any change 3621 ColorAspects origAspects = aspects; 3622 status_t err = OK; 3623 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3624 status_t err = setCodecColorAspects(params, true /* readBack */); 3625 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3626 break; 3627 } 3628 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3629 mComponentName.c_str()); 3630 } 3631 3632 *dataSpace = HAL_DATASPACE_BT709; 3633 aspects = origAspects; // restore desired color aspects 3634 status_t res = getDataSpace( 3635 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3636 if (err == 
OK) { 3637 err = res; 3638 } 3639 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace); 3640 mInputFormat->setBuffer( 3641 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects))); 3642 3643 // update input format with codec supported color aspects (basically set unsupported 3644 // aspects to Unspecified) 3645 if (err == OK) { 3646 (void)getInputColorAspectsForVideoEncoder(mInputFormat); 3647 } 3648 3649 ALOGV("set default color aspects, updated input format to %s, output format to %s", 3650 mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str()); 3651 3652 return err; 3653} 3654 3655status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) { 3656 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3657 DescribeHDRStaticInfoParams params; 3658 InitOMXParams(¶ms); 3659 params.nPortIndex = portIndex; 3660 3661 status_t err = getHDRStaticInfo(params); 3662 if (err == OK) { 3663 // we only set decodec output HDRStaticInfo if codec supports them 3664 setHDRStaticInfoIntoFormat(params.sInfo, format); 3665 } 3666 return err; 3667} 3668 3669status_t ACodec::initDescribeHDRStaticInfoIndex() { 3670 status_t err = mOMX->getExtensionIndex( 3671 mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex); 3672 if (err != OK) { 3673 mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0; 3674 } 3675 return err; 3676} 3677 3678status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams ¶ms) { 3679 status_t err = ERROR_UNSUPPORTED; 3680 if (mDescribeHDRStaticInfoIndex) { 3681 err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3682 } 3683 3684 const HDRStaticInfo *info = ¶ms.sInfo; 3685 ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, " 3686 "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)", 3687 mComponentName.c_str(), 3688 info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, 
info->sType1.mG.y, 3689 info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y, 3690 info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance, 3691 info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel); 3692 3693 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3694 "[%s] setting HDRStaticInfo failed even though codec advertises support", 3695 mComponentName.c_str()); 3696 return err; 3697} 3698 3699status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams ¶ms) { 3700 status_t err = ERROR_UNSUPPORTED; 3701 if (mDescribeHDRStaticInfoIndex) { 3702 err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3703 } 3704 3705 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3706 "[%s] getting HDRStaticInfo failed even though codec advertises support", 3707 mComponentName.c_str()); 3708 return err; 3709} 3710 3711status_t ACodec::setupVideoEncoder( 3712 const char *mime, const sp<AMessage> &msg, 3713 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3714 int32_t tmp; 3715 if (!msg->findInt32("color-format", &tmp)) { 3716 return INVALID_OPERATION; 3717 } 3718 3719 OMX_COLOR_FORMATTYPE colorFormat = 3720 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3721 3722 status_t err = setVideoPortFormatType( 3723 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3724 3725 if (err != OK) { 3726 ALOGE("[%s] does not support color format %d", 3727 mComponentName.c_str(), colorFormat); 3728 3729 return err; 3730 } 3731 3732 /* Input port configuration */ 3733 3734 OMX_PARAM_PORTDEFINITIONTYPE def; 3735 InitOMXParams(&def); 3736 3737 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3738 3739 def.nPortIndex = kPortIndexInput; 3740 3741 err = mOMX->getParameter( 3742 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3743 3744 if (err != OK) { 3745 return err; 3746 } 3747 3748 int32_t width, height, bitrate; 3749 if (!msg->findInt32("width", &width) 3750 
|| !msg->findInt32("height", &height) 3751 || !msg->findInt32("bitrate", &bitrate)) { 3752 return INVALID_OPERATION; 3753 } 3754 3755 video_def->nFrameWidth = width; 3756 video_def->nFrameHeight = height; 3757 3758 int32_t stride; 3759 if (!msg->findInt32("stride", &stride)) { 3760 stride = width; 3761 } 3762 3763 video_def->nStride = stride; 3764 3765 int32_t sliceHeight; 3766 if (!msg->findInt32("slice-height", &sliceHeight)) { 3767 sliceHeight = height; 3768 } 3769 3770 video_def->nSliceHeight = sliceHeight; 3771 3772 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3773 3774 float frameRate; 3775 if (!msg->findFloat("frame-rate", &frameRate)) { 3776 int32_t tmp; 3777 if (!msg->findInt32("frame-rate", &tmp)) { 3778 return INVALID_OPERATION; 3779 } 3780 frameRate = (float)tmp; 3781 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3782 } 3783 3784 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3785 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3786 // this is redundant as it was already set up in setVideoPortFormatType 3787 // FIXME for now skip this only for flexible YUV formats 3788 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3789 video_def->eColorFormat = colorFormat; 3790 } 3791 3792 err = mOMX->setParameter( 3793 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3794 3795 if (err != OK) { 3796 ALOGE("[%s] failed to set input port definition parameters.", 3797 mComponentName.c_str()); 3798 3799 return err; 3800 } 3801 3802 /* Output port configuration */ 3803 3804 OMX_VIDEO_CODINGTYPE compressionFormat; 3805 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3806 3807 if (err != OK) { 3808 return err; 3809 } 3810 3811 err = setVideoPortFormatType( 3812 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3813 3814 if (err != OK) { 3815 ALOGE("[%s] does not support compression format %d", 3816 mComponentName.c_str(), compressionFormat); 3817 3818 return err; 3819 } 3820 3821 
def.nPortIndex = kPortIndexOutput; 3822 3823 err = mOMX->getParameter( 3824 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3825 3826 if (err != OK) { 3827 return err; 3828 } 3829 3830 video_def->nFrameWidth = width; 3831 video_def->nFrameHeight = height; 3832 video_def->xFramerate = 0; 3833 video_def->nBitrate = bitrate; 3834 video_def->eCompressionFormat = compressionFormat; 3835 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3836 3837 err = mOMX->setParameter( 3838 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3839 3840 if (err != OK) { 3841 ALOGE("[%s] failed to set output port definition parameters.", 3842 mComponentName.c_str()); 3843 3844 return err; 3845 } 3846 3847 int32_t intraRefreshPeriod = 0; 3848 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3849 && intraRefreshPeriod >= 0) { 3850 err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3851 if (err != OK) { 3852 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 3853 mComponentName.c_str()); 3854 err = OK; 3855 } 3856 } 3857 3858 switch (compressionFormat) { 3859 case OMX_VIDEO_CodingMPEG4: 3860 err = setupMPEG4EncoderParameters(msg); 3861 break; 3862 3863 case OMX_VIDEO_CodingH263: 3864 err = setupH263EncoderParameters(msg); 3865 break; 3866 3867 case OMX_VIDEO_CodingAVC: 3868 err = setupAVCEncoderParameters(msg); 3869 break; 3870 3871 case OMX_VIDEO_CodingHEVC: 3872 err = setupHEVCEncoderParameters(msg); 3873 break; 3874 3875 case OMX_VIDEO_CodingVP8: 3876 case OMX_VIDEO_CodingVP9: 3877 err = setupVPXEncoderParameters(msg); 3878 break; 3879 3880 default: 3881 break; 3882 } 3883 3884 if (err != OK) { 3885 return err; 3886 } 3887 3888 // Set up color aspects on input, but propagate them to the output format, as they will 3889 // not be read back from encoder. 3890 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3891 if (err == ERROR_UNSUPPORTED) { 3892 ALOGI("[%s] cannot encode color aspects. 
Ignoring.", mComponentName.c_str()); 3893 err = OK; 3894 } 3895 3896 if (err != OK) { 3897 return err; 3898 } 3899 3900 err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat); 3901 if (err == ERROR_UNSUPPORTED) { // support is optional 3902 ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str()); 3903 err = OK; 3904 } 3905 3906 if (err != OK) { 3907 return err; 3908 } 3909 3910 switch (compressionFormat) { 3911 case OMX_VIDEO_CodingAVC: 3912 case OMX_VIDEO_CodingHEVC: 3913 err = configureTemporalLayers(msg, true /* inConfigure */, outputFormat); 3914 if (err != OK) { 3915 err = OK; // ignore failure 3916 } 3917 break; 3918 3919 case OMX_VIDEO_CodingVP8: 3920 case OMX_VIDEO_CodingVP9: 3921 // TODO: do we need to support android.generic layering? webrtc layering is 3922 // already set up in setupVPXEncoderParameters. 3923 break; 3924 3925 default: 3926 break; 3927 } 3928 3929 if (err == OK) { 3930 ALOGI("setupVideoEncoder succeeded"); 3931 } 3932 3933 return err; 3934} 3935 3936status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3937 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3938 InitOMXParams(¶ms); 3939 params.nPortIndex = kPortIndexOutput; 3940 3941 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3942 3943 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3944 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3945 int32_t mbs; 3946 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3947 return INVALID_OPERATION; 3948 } 3949 params.nCirMBs = mbs; 3950 } 3951 3952 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3953 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3954 int32_t mbs; 3955 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3956 return INVALID_OPERATION; 3957 } 3958 params.nAirMBs = mbs; 3959 3960 int32_t ref; 3961 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3962 return INVALID_OPERATION; 3963 } 3964 
// (continuation of ACodec::setCyclicIntraMacroblockRefresh)
        params.nAirRef = ref;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoIntraRefresh,
            &params, sizeof(params));
    return err;
}

// Converts a requested I-frame interval (seconds) into the OMX nPFrames value
// (number of P frames between I frames), accounting for B-frame spacing.
// NOTE(review): callers pass a float frameRate into the int32_t parameter, so
// the fractional part of the frame rate is truncated — presumably acceptable
// for key-frame spacing; confirm against callers.
static OMX_U32 setPFramesSpacing(
        int32_t iFramesInterval /* seconds */, int32_t frameRate, uint32_t BFramesSpacing = 0) {
    // BFramesSpacing is the number of B frames between I/P frames
    // PFramesSpacing (the value to be returned) is the number of P frames between I frames
    //
    // keyFrameInterval = ((PFramesSpacing + 1) * BFramesSpacing) + PFramesSpacing + 1
    //                                          ^^^                ^^^             ^^^
    //                          number of B frames       number of P        I frame
    //
    //                 = (PFramesSpacing + 1) * (BFramesSpacing + 1)
    //
    // E.g.
    //      I   P   I  : I-interval: 8, nPFrames 1, nBFrames 3
    //       BBB BBB

    if (iFramesInterval < 0) { // just 1 key frame
        return 0xFFFFFFFE; // don't use maxint as key-frame-interval calculation will add 1
    } else if (iFramesInterval == 0) { // just key frames
        return 0;
    }

    // round down as key-frame-interval is an upper limit
    uint32_t keyFrameInterval = uint32_t(frameRate * iFramesInterval);
    OMX_U32 ret = keyFrameInterval / (BFramesSpacing + 1);
    return ret > 0 ? ret - 1 : 0;
}

// Reads the "bitrate-mode" key from the configuration message; defaults to
// variable bitrate (VBR) when the key is absent.
static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
    int32_t tmp;
    if (!msg->findInt32("bitrate-mode", &tmp)) {
        return OMX_Video_ControlRateVariable;
    }

    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
}

// Configures the component's MPEG-4 encoder parameters from the configure()
// message: bitrate, I-frame interval, frame rate and optional profile/level.
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as a float or an int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nBFrames = 0;
    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, mpeg4type.nBFrames);
    if (mpeg4type.nPFrames == 0) {
        // Key frames only.
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    // Profile and level must be specified together; verify the component
    // actually supports the combination before applying it.
    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err =
// (continuation of ACodec::setupMPEG4EncoderParameters)
                verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// Configures the component's H.263 encoder parameters from the configure()
// message: bitrate, I-frame interval, frame rate and optional profile/level.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as a float or an int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nBFrames = 0;
    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h263type.nBFrames);
    if (h263type.nPFrames == 0) {
        // Key frames only.
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }

    // Profile and level must be specified together; verify support first.
    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if
5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 4200 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 4201 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 4202 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 4203 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 4204 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 4205 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 4206 }; 4207 4208 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 4209 const int (&limit)[5] = limits[i]; 4210 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 4211 && bitrate <= limit[3]) { 4212 return limit[4]; 4213 } 4214 } 4215 return 0; 4216} 4217 4218status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 4219 int32_t bitrate, iFrameInterval; 4220 if (!msg->findInt32("bitrate", &bitrate) 4221 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4222 return INVALID_OPERATION; 4223 } 4224 4225 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4226 4227 float frameRate; 4228 if (!msg->findFloat("frame-rate", &frameRate)) { 4229 int32_t tmp; 4230 if (!msg->findInt32("frame-rate", &tmp)) { 4231 return INVALID_OPERATION; 4232 } 4233 frameRate = (float)tmp; 4234 } 4235 4236 status_t err = OK; 4237 int32_t intraRefreshMode = 0; 4238 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 4239 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 4240 if (err != OK) { 4241 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4242 err, intraRefreshMode); 4243 return err; 4244 } 4245 } 4246 4247 OMX_VIDEO_PARAM_AVCTYPE h264type; 4248 InitOMXParams(&h264type); 4249 h264type.nPortIndex = kPortIndexOutput; 4250 4251 err = mOMX->getParameter( 4252 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4253 4254 if (err != OK) { 4255 return err; 4256 } 4257 4258 h264type.nAllowedPictureTypes = 4259 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4260 4261 int32_t profile; 4262 if (msg->findInt32("profile", 
&profile)) { 4263 int32_t level; 4264 if (!msg->findInt32("level", &level)) { 4265 return INVALID_OPERATION; 4266 } 4267 4268 err = verifySupportForProfileAndLevel(profile, level); 4269 4270 if (err != OK) { 4271 return err; 4272 } 4273 4274 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4275 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4276 } else { 4277 // Use baseline profile for AVC recording if profile is not specified. 4278 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4279 } 4280 4281 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4282 asString(h264type.eProfile), asString(h264type.eLevel)); 4283 4284 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4285 h264type.nSliceHeaderSpacing = 0; 4286 h264type.bUseHadamard = OMX_TRUE; 4287 h264type.nRefFrames = 1; 4288 h264type.nBFrames = 0; 4289 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4290 if (h264type.nPFrames == 0) { 4291 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4292 } 4293 h264type.nRefIdx10ActiveMinus1 = 0; 4294 h264type.nRefIdx11ActiveMinus1 = 0; 4295 h264type.bEntropyCodingCABAC = OMX_FALSE; 4296 h264type.bWeightedPPrediction = OMX_FALSE; 4297 h264type.bconstIpred = OMX_FALSE; 4298 h264type.bDirect8x8Inference = OMX_FALSE; 4299 h264type.bDirectSpatialTemporal = OMX_FALSE; 4300 h264type.nCabacInitIdc = 0; 4301 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4302 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4303 h264type.nSliceHeaderSpacing = 0; 4304 h264type.bUseHadamard = OMX_TRUE; 4305 h264type.nRefFrames = 2; 4306 h264type.nBFrames = 1; 4307 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4308 h264type.nAllowedPictureTypes = 4309 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4310 h264type.nRefIdx10ActiveMinus1 = 0; 4311 h264type.nRefIdx11ActiveMinus1 = 0; 4312 h264type.bEntropyCodingCABAC = OMX_TRUE; 
4313 h264type.bWeightedPPrediction = OMX_TRUE; 4314 h264type.bconstIpred = OMX_TRUE; 4315 h264type.bDirect8x8Inference = OMX_TRUE; 4316 h264type.bDirectSpatialTemporal = OMX_TRUE; 4317 h264type.nCabacInitIdc = 1; 4318 } 4319 4320 if (h264type.nBFrames != 0) { 4321 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4322 } 4323 4324 h264type.bEnableUEP = OMX_FALSE; 4325 h264type.bEnableFMO = OMX_FALSE; 4326 h264type.bEnableASO = OMX_FALSE; 4327 h264type.bEnableRS = OMX_FALSE; 4328 h264type.bFrameMBsOnly = OMX_TRUE; 4329 h264type.bMBAFF = OMX_FALSE; 4330 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4331 4332 err = mOMX->setParameter( 4333 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4334 4335 if (err != OK) { 4336 return err; 4337 } 4338 4339 return configureBitrate(bitrate, bitrateMode); 4340} 4341 4342status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4343 int32_t bitrate, iFrameInterval; 4344 if (!msg->findInt32("bitrate", &bitrate) 4345 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4346 return INVALID_OPERATION; 4347 } 4348 4349 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4350 4351 float frameRate; 4352 if (!msg->findFloat("frame-rate", &frameRate)) { 4353 int32_t tmp; 4354 if (!msg->findInt32("frame-rate", &tmp)) { 4355 return INVALID_OPERATION; 4356 } 4357 frameRate = (float)tmp; 4358 } 4359 4360 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4361 InitOMXParams(&hevcType); 4362 hevcType.nPortIndex = kPortIndexOutput; 4363 4364 status_t err = OK; 4365 err = mOMX->getParameter( 4366 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4367 if (err != OK) { 4368 return err; 4369 } 4370 4371 int32_t profile; 4372 if (msg->findInt32("profile", &profile)) { 4373 int32_t level; 4374 if (!msg->findInt32("level", &level)) { 4375 return INVALID_OPERATION; 4376 } 4377 4378 err = verifySupportForProfileAndLevel(profile, level); 4379 if (err != OK) { 4380 return err; 4381 } 4382 
// (continuation of ACodec::setupHEVCEncoderParameters)
        hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile);
        hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level);
    }
    // TODO: finer control?
    hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;

    err = mOMX->setParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
    if (err != OK) {
        return err;
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Configures the component's VP8/VP9 encoder parameters from the configure()
// message. "i-frame-interval" is optional here; an optional "ts-schema" key
// ("webrtc.vp8.N-layer" or "android.generic.N[+M]") selects temporal layering.
// Failure to apply the extended VP8 parameters is logged but not fatal.
status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    int32_t iFrameInterval = 0;
    size_t tsLayers = 0;
    OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern =
        OMX_VIDEO_VPXTemporalLayerPatternNone;
    // Per-layer cumulative bitrate percentages, indexed by [layerCount-1][layer].
    static const uint32_t kVp8LayerRateAlloction
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = {
        {100, 100, 100},  // 1 layer
        { 60, 100, 100},  // 2 layers {60%, 40%}
        { 40,  60, 100},  // 3 layers {40%, 20%, 40%}
    };
    if (!msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }
    msg->findInt32("i-frame-interval", &iFrameInterval);

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as a float or an int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    AString tsSchema;
    if (msg->findString("ts-schema", &tsSchema)) {
        unsigned int numLayers = 0;
        unsigned int numBLayers = 0;
        int tags;
        char dummy;
        if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
                && numLayers > 0) {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            tsLayers = numLayers;
        } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
                        &numLayers, &dummy, &numBLayers, &dummy))
                && (tags == 1 || (tags == 3 && dummy == '+'))
                && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            // VPX does not have a concept of B-frames, so just count all layers
            tsLayers = numLayers + numBLayers;
        } else {
            ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
        }
        tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS);
    }

    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
    InitOMXParams(&vp8type);
    vp8type.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
            &vp8type, sizeof(vp8type));

    if (err == OK) {
        if (iFrameInterval > 0) {
            vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;
        }
        vp8type.eTemporalPattern = pattern;
        vp8type.nTemporalLayerCount = tsLayers;
        if (tsLayers > 0) {
            for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) {
                vp8type.nTemporalLayerBitrateRatio[i] =
                    kVp8LayerRateAlloction[tsLayers - 1][i];
            }
        }
        if (bitrateMode == OMX_Video_ControlRateConstant) {
            vp8type.nMinQuantizer = 2;
            vp8type.nMaxQuantizer = 63;
        }

        err = mOMX->setParameter(
                mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                &vp8type, sizeof(vp8type));
        if (err != OK) {
            ALOGW("Extended VP8 parameters set failed: %d", err);
        }
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Enumerates the component's supported profile/level pairs and returns OK if
// the requested profile matches exactly and the requested level is at most a
// supported level; ERROR_UNSUPPORTED otherwise. The enumeration is capped at
// kMaxIndicesToCheck entries.
status_t ACodec::verifySupportForProfileAndLevel(
        int32_t profile, int32_t level) {
    OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        params.nProfileIndex = index;
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
// (continuation of ACodec::verifySupportForProfileAndLevel)
                sizeof(params));

        if (err != OK) {
            return err;
        }

        int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
        int32_t supportedLevel = static_cast<int32_t>(params.eLevel);

        if (profile == supportedProfile && level <= supportedLevel) {
            return OK;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                    mComponentName.c_str(), index,
                    params.eProfile, params.eLevel);
        }
    }
    return ERROR_UNSUPPORTED;
}

// Applies the target bitrate and rate-control mode to the output port.
status_t ACodec::configureBitrate(
        int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
    InitOMXParams(&bitrateType);
    bitrateType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));

    if (err != OK) {
        return err;
    }

    bitrateType.eControlRate = bitrateMode;
    bitrateType.nTargetBitrate = bitrate;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));
}

// Enables resync-marker based error resilience on the output port. Error
// correction is an optional OMX feature: if the component does not expose the
// parameter at all, this silently succeeds.
status_t ACodec::setupErrorCorrectionParameters() {
    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
    InitOMXParams(&errorCorrectionType);
    errorCorrectionType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));

    if (err != OK) {
        return OK;  // Optional feature. Ignore this failure
    }

    errorCorrectionType.bEnableHEC = OMX_FALSE;
    errorCorrectionType.bEnableResync = OMX_TRUE;
    errorCorrectionType.nResynchMarkerSpacing = 256;
    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
    errorCorrectionType.bEnableRVLC = OMX_FALSE;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));
}

// Applies frame size, (for input ports) compression format, color format and
// frame rate to the given video port's definition.
status_t ACodec::setVideoFormatOnPort(
        OMX_U32 portIndex,
        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
        float frameRate) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (portIndex == kPortIndexInput) {
        // XXX Need a (much) better heuristic to compute input buffer sizes.
// (continuation of ACodec::setVideoFormatOnPort)
        const size_t X = 64 * 1024;  // floor for compressed-input buffer size
        if (def.nBufferSize < X) {
            def.nBufferSize = X;
        }
    }

    if (def.eDomain != OMX_PortDomainVideo) {
        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
        return FAILED_TRANSACTION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    if (portIndex == kPortIndexInput) {
        video_def->eCompressionFormat = compressionFormat;
        video_def->eColorFormat = OMX_COLOR_FormatUnused;
        if (frameRate >= 0) {
            // xFramerate is in Q16 fixed point.
            video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
        }
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    return err;
}

// Enables (or, when there is no native window, disables) graphic native
// buffers on the output port.
status_t ACodec::initNativeWindow() {
    if (mNativeWindow != NULL) {
        return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
    }

    mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
    return OK;
}

// Counts buffers on the given port currently held by the OMX component.
size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            ++n;
        }
    }

    return n;
}

// Counts output buffers currently held by the native window (queued/acquired).
size_t ACodec::countBuffersOwnedByNativeWindow() const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ++n;
        }
    }

    return n;
}

// Dequeues buffers back from the native window until only the minimum number
// of undequeued buffers remains (or dequeueing fails).
void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
    if (mNativeWindow == NULL) {
        return;
    }

    while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
            && dequeueBufferFromNativeWindow() != NULL) {
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}

// Returns true if every buffer on the port is either owned by us or parked in
// the native window (i.e. nothing is still held by the component/upstream).
bool ACodec::allYourBuffersAreBelongToUs(
        OMX_U32 portIndex) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mStatus != BufferInfo::OWNED_BY_US
                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("[%s] Buffer %u on port %u still has status %d",
                    mComponentName.c_str(),
                    info->mBufferID, portIndex, info->mStatus);
            return false;
        }
    }

    return true;
}

// Convenience overload covering both ports.
bool ACodec::allYourBuffersAreBelongToUs() {
    return allYourBuffersAreBelongToUs(kPortIndexInput)
        && allYourBuffersAreBelongToUs(kPortIndexOutput);
}

// Queues a message to be re-delivered once the current state transition ends.
void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}

// Re-delivers all deferred messages in order. The queue is swapped out first
// so messages deferred during processing land in a fresh queue.
void ACodec::processDeferredMessages() {
    List<sp<AMessage> > queue = mDeferredQueue;
    mDeferredQueue.clear();

    List<sp<AMessage> >::iterator it = queue.begin();
    while (it != queue.end()) {
        onMessageReceived(*it++);
    }
}

// static
// Fills in a MediaImage2 description for the common YUV420 color formats when
// the component does not implement the describeColorFormat extension.
// Returns false (with mType left UNKNOWN) for formats it cannot describe.
bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not
// (continuation of ACodec::describeDefaultColorFormat)
             know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // YV12 native buffers use 16-aligned strides and V-before-U
                // chroma plane order.
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12: interleaved chroma, so column increment is 2.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            TRESPASS();
    }
    return true;
}

// static
// Queries the component for a MediaImage2 description of the given color
// format, preferring the describeColorFormat (v1) extension, then the
// describeColorFormat2 extension, and finally falling back to the built-in
// default YUV420 description.
bool ACodec::describeColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        DescribeColorFormat2Params &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat",
// (continuation of ACodec::describeColorFormat)
            &describeColorFormatIndex) == OK) {
        DescribeColorFormatParams describeParamsV1(describeParams);
        if (omx->getParameter(
                node, describeColorFormatIndex,
                &describeParamsV1, sizeof(describeParamsV1)) == OK) {
            describeParams.initFromV1(describeParamsV1);
            return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        }
    } else if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
            && omx->getParameter(
            node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
        return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    }

    return describeDefaultColorFormat(describeParams);
}

// static
// Returns true (and sets *flexibleEquivalent) if the given color format can be
// treated as a flexible YUV420 format: it must describe to a 3-plane, 8-bit,
// 4:2:0 subsampled YUV image. Dummy 128x128 dimensions are used for the query.
bool ACodec::isFlexibleColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
    DescribeColorFormat2Params describeParams;
    InitOMXParams(&describeParams);
    describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
    // reasonable dummy values
    describeParams.nFrameWidth = 128;
    describeParams.nFrameHeight = 128;
    describeParams.nStride = 128;
    describeParams.nSliceHeight = 128;
    describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;

    CHECK(flexibleEquivalent != NULL);

    if (!describeColorFormat(omx, node, describeParams)) {
        return false;
    }

    const MediaImage2 &img = describeParams.sMediaImage;
    if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
        if (img.mNumPlanes != 3
                || img.mPlane[img.Y].mHorizSubsampling != 1
                || img.mPlane[img.Y].mVertSubsampling != 1) {
            return false;
        }

        // YUV 420
        if (img.mPlane[img.U].mHorizSubsampling == 2
                && img.mPlane[img.U].mVertSubsampling == 2
                && img.mPlane[img.V].mHorizSubsampling == 2
                && img.mPlane[img.V].mVertSubsampling == 2) {
            // possible flexible YUV420 format
            if (img.mBitDepth <= 8) {
                *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
                return true;
            }
        }
    }
    return false;
}

// Populates `notify` with the current format of the given port (mime, size,
// color info, crop, codec-specific keys). Returns an error if the port
// definition is inconsistent with its expected direction or domain.
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
    const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) {
        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
        return BAD_VALUE;
    }

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // Raw video: valid only on a decoder output / encoder input.
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    if (mNativeWindow == NULL) {
                        DescribeColorFormat2Params describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (describeColorFormat(mOMX, mNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
// (continuation of ACodec::getPortFormat)
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        // Prefer the crop rectangle over the full frame size;
                        // fall back to the full frame if the component does
                        // not report a crop.
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        if (mOMX->getConfig(
                                    mNode,
                                    (portIndex == kPortIndexOutput ?
                                            OMX_IndexConfigCommonOutputCrop :
                                            OMX_IndexConfigCommonInputCrop),
                                    &rect, sizeof(rect)) != OK) {
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        // Sanity-check the crop rectangle against the frame.
                        if (rect.nLeft < 0 ||
                            rect.nTop < 0 ||
                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
                                    rect.nLeft, rect.nTop,
                                    rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                        (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
                    } else {
                        (void)getInputColorAspectsForVideoEncoder(notify);
                        if (mConfigFormat->contains("hdr-static-info")) {
                            (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
                        }
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    // Report the temporal-layering schema for VP8/VP9 encoders.
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        AString tsSchema = "none";
                        if (vp8type.eTemporalPattern
                                == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
                            switch (vp8type.nTemporalLayerCount) {
                                case 1:
                                {
                                    tsSchema = "webrtc.vp8.1-layer";
                                    break;
                                }
                                case 2:
                                {
                                    tsSchema = "webrtc.vp8.2-layer";
                                    break;
                                }
                                case 3:
                                {
                                    tsSchema = "webrtc.vp8.3-layer";
                                    break;
                                }
                                default:
                                {
                                    break;
                                }
                            }
                        }
                        notify->setString("ts-schema", tsSchema);
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
                        // should be CodingUnused
                        ALOGE("Raw port video compression format is %s(%d)",
                                asString(videoDef->eCompressionFormat),
                                videoDef->eCompressionFormat);
                        return BAD_VALUE;
                    }
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                                videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    uint32_t intraRefreshPeriod = 0;
                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
                            && intraRefreshPeriod > 0) {
                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // Only interleaved (or mono) linear PCM is supported.
                    if (params.nChannels <= 0
                            || (params.nChannels != 1 && !params.bInterleaved)
                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
                        ALOGE("unsupported PCM port: %u channels%s, %u-bit",
                                params.nChannels,
                                params.bInterleaved ?
" interleaved" : "", 5095 params.nBitPerSample); 5096 return FAILED_TRANSACTION; 5097 } 5098 5099 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 5100 notify->setInt32("channel-count", params.nChannels); 5101 notify->setInt32("sample-rate", params.nSamplingRate); 5102 5103 AudioEncoding encoding = kAudioEncodingPcm16bit; 5104 if (params.eNumData == OMX_NumericalDataUnsigned 5105 && params.nBitPerSample == 8u) { 5106 encoding = kAudioEncodingPcm8bit; 5107 } else if (params.eNumData == OMX_NumericalDataFloat 5108 && params.nBitPerSample == 32u) { 5109 encoding = kAudioEncodingPcmFloat; 5110 } else if (params.nBitPerSample != 16u 5111 || params.eNumData != OMX_NumericalDataSigned) { 5112 ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ", 5113 asString(params.eNumData), params.eNumData, 5114 asString(params.ePCMMode), params.ePCMMode); 5115 return FAILED_TRANSACTION; 5116 } 5117 notify->setInt32("pcm-encoding", encoding); 5118 5119 if (mChannelMaskPresent) { 5120 notify->setInt32("channel-mask", mChannelMask); 5121 } 5122 break; 5123 } 5124 5125 case OMX_AUDIO_CodingAAC: 5126 { 5127 OMX_AUDIO_PARAM_AACPROFILETYPE params; 5128 InitOMXParams(¶ms); 5129 params.nPortIndex = portIndex; 5130 5131 err = mOMX->getParameter( 5132 mNode, OMX_IndexParamAudioAac, ¶ms, sizeof(params)); 5133 if (err != OK) { 5134 return err; 5135 } 5136 5137 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 5138 notify->setInt32("channel-count", params.nChannels); 5139 notify->setInt32("sample-rate", params.nSampleRate); 5140 break; 5141 } 5142 5143 case OMX_AUDIO_CodingAMR: 5144 { 5145 OMX_AUDIO_PARAM_AMRTYPE params; 5146 InitOMXParams(¶ms); 5147 params.nPortIndex = portIndex; 5148 5149 err = mOMX->getParameter( 5150 mNode, OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); 5151 if (err != OK) { 5152 return err; 5153 } 5154 5155 notify->setInt32("channel-count", 1); 5156 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 5157 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 5158 
notify->setInt32("sample-rate", 16000); 5159 } else { 5160 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 5161 notify->setInt32("sample-rate", 8000); 5162 } 5163 break; 5164 } 5165 5166 case OMX_AUDIO_CodingFLAC: 5167 { 5168 OMX_AUDIO_PARAM_FLACTYPE params; 5169 InitOMXParams(¶ms); 5170 params.nPortIndex = portIndex; 5171 5172 err = mOMX->getParameter( 5173 mNode, OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); 5174 if (err != OK) { 5175 return err; 5176 } 5177 5178 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 5179 notify->setInt32("channel-count", params.nChannels); 5180 notify->setInt32("sample-rate", params.nSampleRate); 5181 break; 5182 } 5183 5184 case OMX_AUDIO_CodingMP3: 5185 { 5186 OMX_AUDIO_PARAM_MP3TYPE params; 5187 InitOMXParams(¶ms); 5188 params.nPortIndex = portIndex; 5189 5190 err = mOMX->getParameter( 5191 mNode, OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); 5192 if (err != OK) { 5193 return err; 5194 } 5195 5196 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 5197 notify->setInt32("channel-count", params.nChannels); 5198 notify->setInt32("sample-rate", params.nSampleRate); 5199 break; 5200 } 5201 5202 case OMX_AUDIO_CodingVORBIS: 5203 { 5204 OMX_AUDIO_PARAM_VORBISTYPE params; 5205 InitOMXParams(¶ms); 5206 params.nPortIndex = portIndex; 5207 5208 err = mOMX->getParameter( 5209 mNode, OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); 5210 if (err != OK) { 5211 return err; 5212 } 5213 5214 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 5215 notify->setInt32("channel-count", params.nChannels); 5216 notify->setInt32("sample-rate", params.nSampleRate); 5217 break; 5218 } 5219 5220 case OMX_AUDIO_CodingAndroidAC3: 5221 { 5222 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 5223 InitOMXParams(¶ms); 5224 params.nPortIndex = portIndex; 5225 5226 err = mOMX->getParameter( 5227 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 5228 ¶ms, sizeof(params)); 5229 if (err != OK) { 5230 return err; 5231 } 5232 5233 notify->setString("mime", 
MEDIA_MIMETYPE_AUDIO_AC3); 5234 notify->setInt32("channel-count", params.nChannels); 5235 notify->setInt32("sample-rate", params.nSampleRate); 5236 break; 5237 } 5238 5239 case OMX_AUDIO_CodingAndroidEAC3: 5240 { 5241 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 5242 InitOMXParams(¶ms); 5243 params.nPortIndex = portIndex; 5244 5245 err = mOMX->getParameter( 5246 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 5247 ¶ms, sizeof(params)); 5248 if (err != OK) { 5249 return err; 5250 } 5251 5252 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 5253 notify->setInt32("channel-count", params.nChannels); 5254 notify->setInt32("sample-rate", params.nSampleRate); 5255 break; 5256 } 5257 5258 case OMX_AUDIO_CodingAndroidOPUS: 5259 { 5260 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 5261 InitOMXParams(¶ms); 5262 params.nPortIndex = portIndex; 5263 5264 err = mOMX->getParameter( 5265 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 5266 ¶ms, sizeof(params)); 5267 if (err != OK) { 5268 return err; 5269 } 5270 5271 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 5272 notify->setInt32("channel-count", params.nChannels); 5273 notify->setInt32("sample-rate", params.nSampleRate); 5274 break; 5275 } 5276 5277 case OMX_AUDIO_CodingG711: 5278 { 5279 OMX_AUDIO_PARAM_PCMMODETYPE params; 5280 InitOMXParams(¶ms); 5281 params.nPortIndex = portIndex; 5282 5283 err = mOMX->getParameter( 5284 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5285 if (err != OK) { 5286 return err; 5287 } 5288 5289 const char *mime = NULL; 5290 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 5291 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 5292 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 5293 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 5294 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 5295 mime = MEDIA_MIMETYPE_AUDIO_RAW; 5296 } 5297 notify->setString("mime", mime); 5298 notify->setInt32("channel-count", params.nChannels); 5299 notify->setInt32("sample-rate", 
params.nSamplingRate); 5300 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5301 break; 5302 } 5303 5304 case OMX_AUDIO_CodingGSMFR: 5305 { 5306 OMX_AUDIO_PARAM_PCMMODETYPE params; 5307 InitOMXParams(¶ms); 5308 params.nPortIndex = portIndex; 5309 5310 err = mOMX->getParameter( 5311 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5312 if (err != OK) { 5313 return err; 5314 } 5315 5316 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5317 notify->setInt32("channel-count", params.nChannels); 5318 notify->setInt32("sample-rate", params.nSamplingRate); 5319 break; 5320 } 5321 5322 default: 5323 ALOGE("Unsupported audio coding: %s(%d)\n", 5324 asString(audioDef->eEncoding), audioDef->eEncoding); 5325 return BAD_TYPE; 5326 } 5327 break; 5328 } 5329 5330 default: 5331 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5332 return BAD_TYPE; 5333 } 5334 5335 return OK; 5336} 5337 5338void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5339 // aspects are normally communicated in ColorAspects 5340 int32_t range, standard, transfer; 5341 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5342 5343 // if some aspects are unspecified, use dataspace fields 5344 if (range != 0) { 5345 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5346 } 5347 if (standard != 0) { 5348 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5349 } 5350 if (transfer != 0) { 5351 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5352 } 5353 5354 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5355 if (range != 0) { 5356 mOutputFormat->setInt32("color-range", range); 5357 } 5358 if (standard != 0) { 5359 mOutputFormat->setInt32("color-standard", standard); 5360 } 5361 if (transfer != 0) { 5362 mOutputFormat->setInt32("color-transfer", transfer); 5363 } 5364 5365 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5366 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5367 dataSpace, 5368 aspects.mRange, asString(aspects.mRange), 5369 aspects.mPrimaries, asString(aspects.mPrimaries), 5370 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5371 aspects.mTransfer, asString(aspects.mTransfer), 5372 range, asString((ColorRange)range), 5373 standard, asString((ColorStandard)standard), 5374 transfer, asString((ColorTransfer)transfer)); 5375} 5376 5377void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5378 // store new output format, at the same time mark that this is no longer the first frame 5379 mOutputFormat = mBaseOutputFormat->dup(); 5380 5381 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5382 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5383 return; 5384 } 5385 5386 if (expectedFormat != NULL) { 5387 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5388 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5389 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5390 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5391 mComponentName.c_str(), 5392 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5393 } 5394 } 5395 5396 if (!mIsVideo && !mIsEncoder) { 5397 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5398 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5399 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5400 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5401 5402 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5403 if (mConverter[kPortIndexOutput] != NULL) { 5404 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5405 } 5406 } 5407 5408 if (mTunneled) { 5409 sendFormatChange(); 5410 } 5411} 5412 5413void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> ¬ify) { 5414 AString mime; 5415 CHECK(mOutputFormat->findString("mime", &mime)); 5416 5417 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) { 5418 // notify renderer of the crop change and dataspace change 5419 // NOTE: native window uses extended right-bottom coordinate 5420 int32_t left, top, right, bottom; 5421 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) { 5422 notify->setRect("crop", left, top, right + 1, bottom + 1); 5423 } 5424 5425 int32_t dataSpace; 5426 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) { 5427 notify->setInt32("dataspace", dataSpace); 5428 } 5429 } 5430} 5431 5432void ACodec::sendFormatChange() { 5433 AString mime; 5434 CHECK(mOutputFormat->findString("mime", &mime)); 5435 5436 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 5437 int32_t channelCount, sampleRate; 5438 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 5439 CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate)); 5440 if (mSampleRate != 0 && sampleRate != 0) { 5441 mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate; 5442 mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate; 5443 mSampleRate = sampleRate; 5444 } 5445 if (mSkipCutBuffer != NULL) { 5446 size_t prevbufsize = mSkipCutBuffer->size(); 5447 if (prevbufsize != 0) { 5448 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 5449 } 5450 } 5451 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 5452 } 5453 5454 sp<AMessage> notify = mNotify->dup(); 5455 notify->setInt32("what", kWhatOutputFormatChanged); 5456 notify->setMessage("format", mOutputFormat); 5457 notify->post(); 5458 5459 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 5460 mLastOutputFormat = mOutputFormat; 5461} 5462 5463void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 5464 
sp<AMessage> notify = mNotify->dup(); 5465 notify->setInt32("what", CodecBase::kWhatError); 5466 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 5467 5468 if (internalError == UNKNOWN_ERROR) { // find better error code 5469 const status_t omxStatus = statusFromOMXError(error); 5470 if (omxStatus != 0) { 5471 internalError = omxStatus; 5472 } else { 5473 ALOGW("Invalid OMX error %#x", error); 5474 } 5475 } 5476 5477 mFatalError = true; 5478 5479 notify->setInt32("err", internalError); 5480 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 5481 notify->post(); 5482} 5483 5484//////////////////////////////////////////////////////////////////////////////// 5485 5486ACodec::PortDescription::PortDescription() { 5487} 5488 5489status_t ACodec::requestIDRFrame() { 5490 if (!mIsEncoder) { 5491 return ERROR_UNSUPPORTED; 5492 } 5493 5494 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5495 InitOMXParams(¶ms); 5496 5497 params.nPortIndex = kPortIndexOutput; 5498 params.IntraRefreshVOP = OMX_TRUE; 5499 5500 return mOMX->setConfig( 5501 mNode, 5502 OMX_IndexConfigVideoIntraVOPRefresh, 5503 ¶ms, 5504 sizeof(params)); 5505} 5506 5507void ACodec::PortDescription::addBuffer( 5508 IOMX::buffer_id id, const sp<ABuffer> &buffer, 5509 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) { 5510 mBufferIDs.push_back(id); 5511 mBuffers.push_back(buffer); 5512 mHandles.push_back(handle); 5513 mMemRefs.push_back(memRef); 5514} 5515 5516size_t ACodec::PortDescription::countBuffers() { 5517 return mBufferIDs.size(); 5518} 5519 5520IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5521 return mBufferIDs.itemAt(index); 5522} 5523 5524sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5525 return mBuffers.itemAt(index); 5526} 5527 5528sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const { 5529 return mHandles.itemAt(index); 5530} 5531 5532sp<RefBase> 
ACodec::PortDescription::memRefAt(size_t index) const { 5533 return mMemRefs.itemAt(index); 5534} 5535 5536//////////////////////////////////////////////////////////////////////////////// 5537 5538ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5539 : AState(parentState), 5540 mCodec(codec) { 5541} 5542 5543ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5544 OMX_U32 /* portIndex */) { 5545 return KEEP_BUFFERS; 5546} 5547 5548bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5549 switch (msg->what()) { 5550 case kWhatInputBufferFilled: 5551 { 5552 onInputBufferFilled(msg); 5553 break; 5554 } 5555 5556 case kWhatOutputBufferDrained: 5557 { 5558 onOutputBufferDrained(msg); 5559 break; 5560 } 5561 5562 case ACodec::kWhatOMXMessageList: 5563 { 5564 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5565 } 5566 5567 case ACodec::kWhatOMXMessageItem: 5568 { 5569 // no need to check as we already did it for kWhatOMXMessageList 5570 return onOMXMessage(msg); 5571 } 5572 5573 case ACodec::kWhatOMXMessage: 5574 { 5575 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5576 } 5577 5578 case ACodec::kWhatSetSurface: 5579 { 5580 sp<AReplyToken> replyID; 5581 CHECK(msg->senderAwaitsResponse(&replyID)); 5582 5583 sp<RefBase> obj; 5584 CHECK(msg->findObject("surface", &obj)); 5585 5586 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5587 5588 sp<AMessage> response = new AMessage; 5589 response->setInt32("err", err); 5590 response->postReply(replyID); 5591 break; 5592 } 5593 5594 case ACodec::kWhatCreateInputSurface: 5595 case ACodec::kWhatSetInputSurface: 5596 case ACodec::kWhatSignalEndOfInputStream: 5597 { 5598 // This may result in an app illegal state exception. 
5599 ALOGE("Message 0x%x was not handled", msg->what()); 5600 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5601 return true; 5602 } 5603 5604 case ACodec::kWhatOMXDied: 5605 { 5606 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5607 ALOGE("OMX/mediaserver died, signalling error!"); 5608 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5609 break; 5610 } 5611 5612 case ACodec::kWhatReleaseCodecInstance: 5613 { 5614 ALOGI("[%s] forcing the release of codec", 5615 mCodec->mComponentName.c_str()); 5616 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5617 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5618 mCodec->mComponentName.c_str(), err); 5619 sp<AMessage> notify = mCodec->mNotify->dup(); 5620 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5621 notify->post(); 5622 break; 5623 } 5624 5625 default: 5626 return false; 5627 } 5628 5629 return true; 5630} 5631 5632bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5633 // there is a possibility that this is an outstanding message for a 5634 // codec that we have already destroyed 5635 if (mCodec->mNode == 0) { 5636 ALOGI("ignoring message as already freed component: %s", 5637 msg->debugString().c_str()); 5638 return false; 5639 } 5640 5641 IOMX::node_id nodeID; 5642 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5643 if (nodeID != mCodec->mNode) { 5644 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5645 return false; 5646 } 5647 return true; 5648} 5649 5650bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5651 sp<RefBase> obj; 5652 CHECK(msg->findObject("messages", &obj)); 5653 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5654 5655 bool receivedRenderedEvents = false; 5656 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5657 it != msgList->getList().cend(); ++it) { 5658 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 
mCodec->handleMessage(*it);
        int32_t type;
        CHECK((*it)->findInt32("type", &type));
        if (type == omx_message::FRAME_RENDERED) {
            receivedRenderedEvents = true;
        }
    }

    if (receivedRenderedEvents) {
        // NOTE: all buffers are rendered in this case
        mCodec->notifyOfRenderedFrames();
    }
    return true;
}

// Decodes one OMX message and routes it to the matching onOMX* handler.
bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));

    switch (type) {
        case omx_message::EVENT:
        {
            int32_t event, data1, data2;
            CHECK(msg->findInt32("event", &event));
            CHECK(msg->findInt32("data1", &data1));
            CHECK(msg->findInt32("data2", &data2));

            if (event == OMX_EventCmdComplete
                    && data1 == OMX_CommandFlush
                    && data2 == (int32_t)OMX_ALL) {
                // Use of this notification is not consistent across
                // implementations. We'll drop this notification and rely
                // on flush-complete notifications on the individual port
                // indices instead.

                return true;
            }

            return onOMXEvent(
                    static_cast<OMX_EVENTTYPE>(event),
                    static_cast<OMX_U32>(data1),
                    static_cast<OMX_U32>(data2));
        }

        case omx_message::EMPTY_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            int32_t fenceFd;

            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXEmptyBufferDone(bufferID, fenceFd);
        }

        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));

            int32_t rangeOffset, rangeLength, flags, fenceFd;
            int64_t timeUs;

            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    fenceFd);
        }

        case omx_message::FRAME_RENDERED:
        {
            int64_t mediaTimeUs, systemNano;

            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
            CHECK(msg->findInt64("system_nano", &systemNano));

            return onOMXFrameRendered(
                    mediaTimeUs, systemNano);
        }

        default:
            ALOGE("Unexpected message type: %d", type);
            return false;
    }
}

bool ACodec::BaseState::onOMXFrameRendered(
        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
    // ignore outside of Executing and PortSettingsChanged states
    return true;
}

// Handles generic OMX events in the base state: data-space changes are
// unpacked and forwarded here, errors are translated and signalled, anything
// else is left to the derived state (return false).
bool ACodec::BaseState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    if (event == OMX_EventDataSpaceChanged) {
        // data2 packs range/primaries/matrix/transfer as one byte each.
        ColorAspects aspects;
        aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
        aspects.mPrimaries =
(ColorAspects::Primaries)((data2 >> 16) & 0xFF); 5765 aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF); 5766 aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF); 5767 5768 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects); 5769 return true; 5770 } 5771 5772 if (event != OMX_EventError) { 5773 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)", 5774 mCodec->mComponentName.c_str(), event, data1, data2); 5775 5776 return false; 5777 } 5778 5779 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1); 5780 5781 // verify OMX component sends back an error we expect. 5782 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 5783 if (!isOMXError(omxError)) { 5784 ALOGW("Invalid OMX error %#x", omxError); 5785 omxError = OMX_ErrorUndefined; 5786 } 5787 mCodec->signalError(omxError); 5788 5789 return true; 5790} 5791 5792bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { 5793 ALOGV("[%s] onOMXEmptyBufferDone %u", 5794 mCodec->mComponentName.c_str(), bufferID); 5795 5796 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5797 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5798 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5799 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5800 mCodec->dumpBuffers(kPortIndexInput); 5801 if (fenceFd >= 0) { 5802 ::close(fenceFd); 5803 } 5804 return false; 5805 } 5806 info->mStatus = BufferInfo::OWNED_BY_US; 5807 5808 // input buffers cannot take fences, so wait for any fence now 5809 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); 5810 fenceFd = -1; 5811 5812 // still save fence for completeness 5813 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); 5814 5815 // We're in "store-metadata-in-buffers" mode, the underlying 5816 // OMX component had access to data that's implicitly refcounted 5817 // by this "MediaBuffer" object. 
// Now that the OMX component has
    // told us that it's done with the input buffer, we can decrement
    // the mediaBuffer's reference count.
    info->mData->setMediaBufferBase(NULL);

    PortMode mode = getPortMode(kPortIndexInput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
            postFillThisBuffer(info);
            break;

        case FREE_BUFFERS:
        default:
            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
            return false;
    }

    return true;
}

// Asks the upstream client (via kWhatFillThisBuffer) to fill |info| with new
// input data; ownership moves to OWNED_BY_UPSTREAM until the reply arrives.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
    notify->setInt32("buffer-id", info->mBufferID);

    info->mData->meta()->clear();
    notify->setBuffer("buffer", info->mData);

    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
    reply->setInt32("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

// Handles the client's reply to kWhatFillThisBuffer: reclaims the buffer,
// then — depending on port mode — keeps it, or (after any needed conversion
// and metadata marshalling) submits it to the component via emptyBuffer,
// signalling EOS when appropriate.
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<ABuffer> buffer;
    int32_t err = OK;
    bool eos = false;
    PortMode mode = getPortMode(kPortIndexInput);

    if (!msg->findBuffer("buffer", &buffer)) {
        /* these are unfilled buffers returned by client */
        CHECK(msg->findInt32("err", &err));

        if (err == OK) {
            /* buffers with no errors are returned on MediaCodec.flush */
            mode = KEEP_BUFFERS;
        } else {
            ALOGV("[%s] saw error %d instead of an input buffer",
                    mCodec->mComponentName.c_str(), err);
            eos = true;
        }

        buffer.clear();
    }

    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                MetadataBufferType metaType = mCodec->mInputMetadataType;
                int32_t isCSD = 0;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    // Codec-specific data goes as a regular (non-metadata)
                    // buffer flagged CODECCONFIG.
                    if (mCodec->mIsLegacyVP9Decoder) {
                        ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                        postFillThisBuffer(info);
                        break;
                    }
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                    metaType = kMetadataBufferTypeInvalid;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                if (buffer != info->mCodecData) {
                    // Client buffer differs from the codec-facing buffer:
                    // convert (or plain-copy) the data across.
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                            mCodec->mComponentName.c_str(),
                            bufferID,
                            buffer.get(), info->mCodecData.get());

                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL || isCSD) {
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                            mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                            mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                            mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");

                // Marshal the metadata reference (native handle / ANW buffer)
                // into the codec buffer before submitting it.
                status_t err2 = OK;
                switch (metaType) {
                case kMetadataBufferTypeInvalid:
                    break;
#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                case kMetadataBufferTypeNativeHandleSource:
                    if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
                        VideoNativeHandleMetadata *vnhmd =
                            (VideoNativeHandleMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateNativeHandleInMeta(
                                mCodec->mNode, kPortIndexInput,
                                NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */),
                                bufferID);
                    }
                    break;
                case kMetadataBufferTypeANWBuffer:
                    if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
                        VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateGraphicBufferInMeta(
                                mCodec->mNode, kPortIndexInput,
                                new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */),
                                bufferID);
                    }
                    break;
#endif
                default:
                    ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode",
                            asString(metaType), info->mCodecData->size(),
                            sizeof(buffer_handle_t) * 8);
                    err2 = ERROR_UNSUPPORTED;
                    break;
                }

                if (err2 == OK) {
                    err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        info->mCodecData->size(),
                        flags,
                        timeUs,
                        info->mFenceFd);
                }
                // The fence was handed to the component (or is void now).
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                            mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    // Remember the status that accompanied input EOS so it can
                    // be reported with the output-side EOS notification later.
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // No buffer attached: the upstream is signalling EOS (possibly
                // with an error). Send an empty buffer carrying the EOS flag.
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                            mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                            mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                        mCodec->mComponentName.c_str(), bufferID);

                info->checkReadFence("onInputBufferFilled");
                // Zero-length buffer with OMX_BUFFERFLAG_EOS; the fence fd is
                // handed to the component, so clear our copy right after.
                status_t err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        0,
                        OMX_BUFFERFLAG_EOS,
                        0,
                        info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// Scans the input-port buffers and, if at least one is currently owned by us,
// posts a "fill this buffer" request for the last such buffer found. Does
// nothing once the input port has seen EOS or when no buffer is ours.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            // Keep scanning; the last OWNED_BY_US buffer wins.
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

// Handles the OMX FillBufferDone callback for an output buffer: validates
// ownership, takes back the buffer (and its release fence), and either
// resubmits it, frees it, or forwards it downstream depending on port mode.
// Returns true if the event was consumed.
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
            mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;
    status_t err= OK;

#if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        // The fence won't be adopted by anyone on this error path, so close
        // it here to avoid leaking the fd.
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return true;
    }

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus = BufferInfo::OWNED_BY_US;

    if (info->mRenderInfo != NULL) {
        // The fence for an emptied buffer must have signaled, but there still could be queued
        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
        // as we will soon requeue this buffer to the surface.
        // While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // An empty buffer that does not terminate the stream is recycled
            // straight back to the component without notifying downstream.
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                        mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            // "reply" is what downstream posts back to us when it has drained
            // the buffer (kWhatOutputBufferDrained).
            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) {
                // If potentially rendering onto a surface, always save key format data (crop &
                // data space) so that we can set it if and once the buffer is rendered.
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                native_handle_t *handle = NULL;
                VideoNativeHandleMetadata &nativeMeta =
                    *(VideoNativeHandleMetadata *)info->mData->data();
                if (info->mData->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // handle is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pHandle;
#endif
                }
                info->mData->meta()->setPointer("handle", handle);
                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                info->mData->meta()->setInt32("rangeLength", rangeLength);
            } else if (info->mData == info->mCodecData) {
                info->mData->setRange(rangeOffset, rangeLength);
            } else {
                info->mCodecData->setRange(rangeOffset, rangeLength);
                // in this case we know that mConverter is not null
                status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                        info->mCodecData, info->mData);
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                mCodec->mSkipCutBuffer->submit(info->mData);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);

            // Hand the buffer to the client (MediaCodec) for draining.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setBuffer("buffer", info->mData);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                // Report the status recorded when input EOS was seen.
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        case FREE_BUFFERS:
            err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return true;
            }
            break;

        default:
            ALOGE("Invalid port mode: %d", mode);
            return false;
    }

    return true;
}

// Handles the downstream client returning an output buffer: optionally
// renders it to the native window (applying any pending crop/dataspace
// carried on the message first), then recycles the buffer according to the
// current output port mode.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    // Apply crop/dataspace saved in the render notification, but only when
    // they actually changed since the last value we pushed to the window.
    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
            && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
        mCodec->mLastNativeWindowCrop = crop;
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    int32_t dataSpace;
    if (msg->findInt32("dataspace", &dataSpace)
            && dataSpace != mCodec->mLastNativeWindowDataSpace) {
        status_t err = native_window_set_buffers_data_space(
                mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
        mCodec->mLastNativeWindowDataSpace = dataSpace;
        ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
    }

    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0
            && info->mData != NULL && info->mData->size() != 0) {
        ATRACE_NAME("render");
        // The client wants this buffer to be rendered.

        // save buffers sent to the surface so we can get render time when they return
        int64_t mediaTimeUs = -1;
        info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
        if (mediaTimeUs >= 0) {
            mCodec->mRenderTracker.onFrameQueued(
                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
        }

        int64_t timestampNs = 0;
        if (!msg->findInt64("timestampNs", &timestampNs)) {
            // use media timestamp if client did not request a specific render timestamp
            if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
                ALOGV("using buffer PTS of %lld", (long long)timestampNs);
                timestampNs *= 1000;
            }
        }

        status_t err;
        err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
        ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);

        info->checkReadFence("onOutputBufferDrained before queueBuffer");
        // queueBuffer adopts the fence fd, so clear ours immediately after.
        err = mCodec->mNativeWindow->queueBuffer(
                mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
        info->mFenceFd = -1;
        if (err == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            info->mStatus = BufferInfo::OWNED_BY_US;
            // keeping read fence as write fence to avoid clobbering
            info->mIsReadFence = false;
        }
    } else {
        if (mCodec->mNativeWindow != NULL &&
            (info->mData == NULL || info->mData->size() != 0)) {
            // move read fence into write fence to avoid clobbering
            info->mIsReadFence = false;
            ATRACE_NAME("frame-drop");
        }
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                // NOTE(review): dequeueBufferFromNativeWindow() may return
                // NULL, hence the guard below.
                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                            mCodec->mComponentName.c_str(), info->mBufferID);
                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                    status_t err = mCodec->mOMX->fillBuffer(
                            mCodec->mNode, info->mBufferID, info->mFenceFd);
                    info->mFenceFd = -1;
                    if (err == OK) {
                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                    } else {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                }
            }
            break;
        }

        case FREE_BUFFERS:
        {
            status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            }
            break;
        }

        default:
            ALOGE("Invalid port mode: %d", mode);
            return;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}

// Resets all per-component state when the codec drops back to the
// Uninitialized state, and detaches the OMX death notifier if one was set.
void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    if (mDeathNotifier != NULL) {
        mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier);
        mDeathNotifier.clear();
    }

    mCodec->mUsingNativeWindow = false;
    mCodec->mNativeWindow.clear();
    mCodec->mNativeWindowUsageBits = 0;
    mCodec->mNode = 0;
    mCodec->mOMX.clear();
    mCodec->mQuirks = 0;
    mCodec->mFlags = 0;
    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mConverter[0].clear();
    mCodec->mConverter[1].clear();
    mCodec->mComponentName.clear();
}

// Message dispatch while Uninitialized: setup/allocation are performed;
// shutdown and flush complete trivially since there is no component yet.
bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }

        case ACodec::kWhatAllocateComponent:
        {
            onAllocateComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));
            ALOGW_IF(keepComponentAllocated,
                     "cannot keep component allocated on shutdown in Uninitialized state");

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatReleaseCodecInstance:
        {
            // nothing to do, as we have already signaled shutdown
            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Convenience path: allocate, configure and start in one shot. Each step is
// only attempted if the previous one succeeded.
void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    if (onAllocateComponent(msg)
            && mCodec->mLoadedState->onConfigureComponent(msg)) {
        mCodec->mLoadedState->onStart();
    }
}

// Connects to the OMX service and instantiates a component node, either by
// explicit component name or by matching the requested mime type/encoder
// flag against the codec list. On success transitions to the Loaded state.
// Returns false (after signalling an error) if no component could be made.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mNode == 0);

    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    sp<IOMX> omx = client.interface();

    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

    Vector<AString> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        sp<IMediaCodecList> list = MediaCodecList::getInstance();
        if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
            matchingCodecs.add(componentName);
        }
    } else {
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        MediaCodecList::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                0,       // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    status_t err = NAME_NOT_FOUND;
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs[matchIndex];
        quirks = MediaCodecList::getQuirksFor(componentName.c_str());

        // Temporarily boost thread priority while allocating the node;
        // restored immediately afterwards.
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        node = 0;
    }

    if (node == 0) {
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                    encoder ? "en" : "de", mime.c_str(), err);
        } else {
            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
        }

        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
        return false;
    }

    mDeathNotifier = new DeathNotifier(notify);
    if (mCodec->mNodeBinder == NULL ||
            mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    notify = new AMessage(kWhatOMXMessageList, mCodec);
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mRenderTracker.setComponentName(componentName);
    mCodec->mFlags = 0;

    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    mCodec->mQuirks = quirks;
    mCodec->mOMX = omx;
    mCodec->mNode = node;

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedState::LoadedState(ACodec *codec)
    : BaseState(codec) {
}

// Resets per-session state on entering Loaded; completes any shutdown that
// was requested while a state transition was still in flight.
void ACodec::LoadedState::stateEntered() {
    ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetadataBuffersToSubmit = 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();

    if (mCodec->mShutdownInProgress) {
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;

    mCodec->processDeferredMessages();
}

// Frees the OMX node (unless the caller asked to keep it) and acknowledges
// an explicit shutdown request to the client.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        (void)mCodec->mOMX->freeNode(mCodec->mNode);

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

// Message dispatch while Loaded: configuration, input-surface setup, start
// and shutdown are all valid here.
bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatSetInputSurface:
        {
            onSetInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);
            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // Nothing in flight while Loaded, so flush completes immediately.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Configures the allocated component from the message's "mime" entry plus
// codec-specific parameters; notifies the client with the resulting input
// and output formats. Returns false (after signalling an error) on failure.
bool ACodec::LoadedState::onConfigureComponent(
        const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mNode != 0);

    status_t err = OK;
    AString mime;
    if (!msg->findString("mime", &mime)) {
        err = BAD_VALUE;
    } else {
        err = mCodec->configureCodec(mime.c_str(), msg);
    }
    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d",
              mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentConfigured);
        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
        notify->post();
    }

    return true;
}

// Pushes the encoder-surface related options that were gathered during
// configuration (repeat-frame delay, max PTS gap, max fps, time lapse,
// suspended start, consumer usage, color aspects) down to the component.
// Stops at, and returns, the first error.
status_t ACodec::LoadedState::setupInputSurface() {
    status_t err = OK;

    if (mCodec->mRepeatFrameDelayUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
                &mCodec->mRepeatFrameDelayUs,
                sizeof(mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxPtsGapUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
                &mCodec->mMaxPtsGapUs,
                sizeof(mCodec->mMaxPtsGapUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_FPS,
                &mCodec->mMaxFps,
                sizeof(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        // Packed as {frame duration, capture interval} — both must be set.
        int64_t timeLapse[2];
        timeLapse[0] = mCodec->mTimePerFrameUs;
        timeLapse[1] = mCodec->mTimePerCaptureUs;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_TIME_LAPSE,
                &timeLapse[0],
                sizeof(timeLapse));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        bool suspend = true;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_SUSPEND,
                &suspend,
                sizeof(suspend));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    // Consumer usage bits are advisory; a failure to query them is ignored.
    uint32_t usageBits;
    if (mCodec->mOMX->getParameter(
            mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    sp<ABuffer> colorAspectsBuffer;
    if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
                colorAspectsBuffer->base(), colorAspectsBuffer->capacity());
        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}

// Creates a new encoder input surface on the component, configures it via
// setupInputSurface(), and replies to the client with either the producer
// (wrapped) or an error value.
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    sp<IGraphicBufferProducer> bufferProducer;
    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        err = mCodec->mOMX->createInputSurface(
                mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer,
                &mCodec->mInputMetadataType);
        // framework uses ANW buffers internally instead of gralloc handles
        if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        }
    }

    if (err == OK) {
        err = setupInputSurface();
    }

    if (err == OK) {
        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Attaches a client-supplied persistent input surface to the component and
// configures it; replies kWhatInputSurfaceAccepted, carrying "err" on
// failure (signalError cannot be used in the configured state).
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        err = mCodec->mOMX->setInputSurface(
                mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
                &mCodec->mInputMetadataType);
        // framework uses ANW buffers internally instead of gralloc handles
        if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        }
    }

    if (err == OK) {
        surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
        err = setupInputSurface();
    }

    if (err != OK) {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Kicks off the Loaded -> Idle transition by sending the OMX state command;
// buffer allocation happens in LoadedToIdleState::stateEntered().
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// Allocates input/output buffers needed for the Idle state; on failure,
// rolls the component back to Loaded and frees whatever was allocated.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
              "(error 0x%08x)",
              err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates buffers on the input port first, then the output port; returns
// the first failure.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

// While transitioning to Idle, parameter/shutdown requests are deferred
// until the transition completes.
bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }
        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Completes the Loaded -> Idle transition: on the expected CmdComplete,
// immediately requests the Idle -> Executing transition.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

// While transitioning to Executing, defer parameter/shutdown requests;
// flush and resume complete trivially since nothing is in flight yet.
bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Completes the Idle -> Executing transition: resumes buffer flow and
// enters the Executing state on the expected CmdComplete event.
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    // Both ports recycle their buffers back to the component while Executing.
    return RESUBMIT_BUFFERS;
}

void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Hands every output buffer we (still) own to the component via fillBuffer.
// Buffers parked in the native window are skipped; any unexpected ownership
// or a fillBuffer failure aborts the loop and raises a fatal error.
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        // The fence fd is transferred to the component; clear ours after.
        status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
        info->mFenceFd = -1;
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }

submitOutputBuffers(); 7280 7281 // Post all available input buffers 7282 if (mCodec->mBuffers[kPortIndexInput].size() == 0u) { 7283 ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str()); 7284 } 7285 7286 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) { 7287 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 7288 if (info->mStatus == BufferInfo::OWNED_BY_US) { 7289 postFillThisBuffer(info); 7290 } 7291 } 7292 7293 mActive = true; 7294} 7295 7296void ACodec::ExecutingState::stateEntered() { 7297 ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); 7298 7299 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7300 mCodec->processDeferredMessages(); 7301} 7302 7303bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { 7304 bool handled = false; 7305 7306 switch (msg->what()) { 7307 case kWhatShutdown: 7308 { 7309 int32_t keepComponentAllocated; 7310 CHECK(msg->findInt32( 7311 "keepComponentAllocated", &keepComponentAllocated)); 7312 7313 mCodec->mShutdownInProgress = true; 7314 mCodec->mExplicitShutdown = true; 7315 mCodec->mKeepComponentAllocated = keepComponentAllocated; 7316 7317 mActive = false; 7318 7319 status_t err = mCodec->mOMX->sendCommand( 7320 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 7321 if (err != OK) { 7322 if (keepComponentAllocated) { 7323 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7324 } 7325 // TODO: do some recovery here. 
7326 } else { 7327 mCodec->changeState(mCodec->mExecutingToIdleState); 7328 } 7329 7330 handled = true; 7331 break; 7332 } 7333 7334 case kWhatFlush: 7335 { 7336 ALOGV("[%s] ExecutingState flushing now " 7337 "(codec owns %zu/%zu input, %zu/%zu output).", 7338 mCodec->mComponentName.c_str(), 7339 mCodec->countBuffersOwnedByComponent(kPortIndexInput), 7340 mCodec->mBuffers[kPortIndexInput].size(), 7341 mCodec->countBuffersOwnedByComponent(kPortIndexOutput), 7342 mCodec->mBuffers[kPortIndexOutput].size()); 7343 7344 mActive = false; 7345 7346 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL); 7347 if (err != OK) { 7348 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7349 } else { 7350 mCodec->changeState(mCodec->mFlushingState); 7351 } 7352 7353 handled = true; 7354 break; 7355 } 7356 7357 case kWhatResume: 7358 { 7359 resume(); 7360 7361 handled = true; 7362 break; 7363 } 7364 7365 case kWhatRequestIDRFrame: 7366 { 7367 status_t err = mCodec->requestIDRFrame(); 7368 if (err != OK) { 7369 ALOGW("Requesting an IDR frame failed."); 7370 } 7371 7372 handled = true; 7373 break; 7374 } 7375 7376 case kWhatSetParameters: 7377 { 7378 sp<AMessage> params; 7379 CHECK(msg->findMessage("params", ¶ms)); 7380 7381 status_t err = mCodec->setParameters(params); 7382 7383 sp<AMessage> reply; 7384 if (msg->findMessage("reply", &reply)) { 7385 reply->setInt32("err", err); 7386 reply->post(); 7387 } 7388 7389 handled = true; 7390 break; 7391 } 7392 7393 case ACodec::kWhatSignalEndOfInputStream: 7394 { 7395 mCodec->onSignalEndOfInputStream(); 7396 handled = true; 7397 break; 7398 } 7399 7400 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 7401 case kWhatSubmitOutputMetadataBufferIfEOS: 7402 { 7403 if (mCodec->mPortEOS[kPortIndexInput] && 7404 !mCodec->mPortEOS[kPortIndexOutput]) { 7405 status_t err = mCodec->submitOutputMetadataBuffer(); 7406 if (err == OK) { 7407 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 7408 } 
7409 } 7410 return true; 7411 } 7412 7413 default: 7414 handled = BaseState::onMessageReceived(msg); 7415 break; 7416 } 7417 7418 return handled; 7419} 7420 7421status_t ACodec::setParameters(const sp<AMessage> ¶ms) { 7422 int32_t videoBitrate; 7423 if (params->findInt32("video-bitrate", &videoBitrate)) { 7424 OMX_VIDEO_CONFIG_BITRATETYPE configParams; 7425 InitOMXParams(&configParams); 7426 configParams.nPortIndex = kPortIndexOutput; 7427 configParams.nEncodeBitrate = videoBitrate; 7428 7429 status_t err = mOMX->setConfig( 7430 mNode, 7431 OMX_IndexConfigVideoBitrate, 7432 &configParams, 7433 sizeof(configParams)); 7434 7435 if (err != OK) { 7436 ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d", 7437 videoBitrate, err); 7438 7439 return err; 7440 } 7441 } 7442 7443 int64_t skipFramesBeforeUs; 7444 if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) { 7445 status_t err = 7446 mOMX->setInternalOption( 7447 mNode, 7448 kPortIndexInput, 7449 IOMX::INTERNAL_OPTION_START_TIME, 7450 &skipFramesBeforeUs, 7451 sizeof(skipFramesBeforeUs)); 7452 7453 if (err != OK) { 7454 ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err); 7455 return err; 7456 } 7457 } 7458 7459 int32_t dropInputFrames; 7460 if (params->findInt32("drop-input-frames", &dropInputFrames)) { 7461 bool suspend = dropInputFrames != 0; 7462 7463 status_t err = 7464 mOMX->setInternalOption( 7465 mNode, 7466 kPortIndexInput, 7467 IOMX::INTERNAL_OPTION_SUSPEND, 7468 &suspend, 7469 sizeof(suspend)); 7470 7471 if (err != OK) { 7472 ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err); 7473 return err; 7474 } 7475 } 7476 7477 int32_t dummy; 7478 if (params->findInt32("request-sync", &dummy)) { 7479 status_t err = requestIDRFrame(); 7480 7481 if (err != OK) { 7482 ALOGE("Requesting a sync frame failed w/ err %d", err); 7483 return err; 7484 } 7485 } 7486 7487 float rate; 7488 if (params->findFloat("operating-rate", &rate) && rate > 0) { 7489 status_t err = 
setOperatingRate(rate, mIsVideo); 7490 if (err != OK) { 7491 ALOGE("Failed to set parameter 'operating-rate' (err %d)", err); 7492 return err; 7493 } 7494 } 7495 7496 int32_t intraRefreshPeriod = 0; 7497 if (params->findInt32("intra-refresh-period", &intraRefreshPeriod) 7498 && intraRefreshPeriod > 0) { 7499 status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false); 7500 if (err != OK) { 7501 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 7502 mComponentName.c_str()); 7503 err = OK; 7504 } 7505 } 7506 7507 status_t err = configureTemporalLayers(params, false /* inConfigure */, mOutputFormat); 7508 if (err != OK) { 7509 err = OK; // ignore failure 7510 } 7511 7512 return err; 7513} 7514 7515void ACodec::onSignalEndOfInputStream() { 7516 sp<AMessage> notify = mNotify->dup(); 7517 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); 7518 7519 status_t err = mOMX->signalEndOfInputStream(mNode); 7520 if (err != OK) { 7521 notify->setInt32("err", err); 7522 } 7523 notify->post(); 7524} 7525 7526bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 7527 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7528 return true; 7529} 7530 7531bool ACodec::ExecutingState::onOMXEvent( 7532 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7533 switch (event) { 7534 case OMX_EventPortSettingsChanged: 7535 { 7536 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 7537 7538 mCodec->onOutputFormatChanged(); 7539 7540 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { 7541 mCodec->mMetadataBuffersToSubmit = 0; 7542 CHECK_EQ(mCodec->mOMX->sendCommand( 7543 mCodec->mNode, 7544 OMX_CommandPortDisable, kPortIndexOutput), 7545 (status_t)OK); 7546 7547 mCodec->freeOutputBuffersNotOwnedByComponent(); 7548 7549 mCodec->changeState(mCodec->mOutputPortSettingsChangedState); 7550 } else if (data2 != OMX_IndexConfigCommonOutputCrop 7551 && data2 != OMX_IndexConfigAndroidIntraRefresh) { 7552 ALOGV("[%s] 
OMX_EventPortSettingsChanged 0x%08x", 7553 mCodec->mComponentName.c_str(), data2); 7554 } 7555 7556 return true; 7557 } 7558 7559 case OMX_EventBufferFlag: 7560 { 7561 return true; 7562 } 7563 7564 default: 7565 return BaseState::onOMXEvent(event, data1, data2); 7566 } 7567} 7568 7569//////////////////////////////////////////////////////////////////////////////// 7570 7571ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState( 7572 ACodec *codec) 7573 : BaseState(codec) { 7574} 7575 7576ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode( 7577 OMX_U32 portIndex) { 7578 if (portIndex == kPortIndexOutput) { 7579 return FREE_BUFFERS; 7580 } 7581 7582 CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput); 7583 7584 return RESUBMIT_BUFFERS; 7585} 7586 7587bool ACodec::OutputPortSettingsChangedState::onMessageReceived( 7588 const sp<AMessage> &msg) { 7589 bool handled = false; 7590 7591 switch (msg->what()) { 7592 case kWhatFlush: 7593 case kWhatShutdown: 7594 case kWhatResume: 7595 case kWhatSetParameters: 7596 { 7597 if (msg->what() == kWhatResume) { 7598 ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str()); 7599 } 7600 7601 mCodec->deferMessage(msg); 7602 handled = true; 7603 break; 7604 } 7605 7606 default: 7607 handled = BaseState::onMessageReceived(msg); 7608 break; 7609 } 7610 7611 return handled; 7612} 7613 7614void ACodec::OutputPortSettingsChangedState::stateEntered() { 7615 ALOGV("[%s] Now handling output port settings change", 7616 mCodec->mComponentName.c_str()); 7617} 7618 7619bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered( 7620 int64_t mediaTimeUs, nsecs_t systemNano) { 7621 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7622 return true; 7623} 7624 7625bool ACodec::OutputPortSettingsChangedState::onOMXEvent( 7626 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7627 switch (event) { 7628 case OMX_EventCmdComplete: 7629 { 7630 if (data1 == (OMX_U32)OMX_CommandPortDisable) { 7631 if 
(data2 != (OMX_U32)kPortIndexOutput) { 7632 ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2); 7633 return false; 7634 } 7635 7636 ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str()); 7637 7638 status_t err = OK; 7639 if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) { 7640 ALOGE("disabled port should be empty, but has %zu buffers", 7641 mCodec->mBuffers[kPortIndexOutput].size()); 7642 err = FAILED_TRANSACTION; 7643 } else { 7644 mCodec->mDealer[kPortIndexOutput].clear(); 7645 } 7646 7647 if (err == OK) { 7648 err = mCodec->mOMX->sendCommand( 7649 mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput); 7650 } 7651 7652 if (err == OK) { 7653 err = mCodec->allocateBuffersOnPort(kPortIndexOutput); 7654 ALOGE_IF(err != OK, "Failed to allocate output port buffers after port " 7655 "reconfiguration: (%d)", err); 7656 } 7657 7658 if (err != OK) { 7659 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 7660 7661 // This is technically not correct, but appears to be 7662 // the only way to free the component instance. 7663 // Controlled transitioning from excecuting->idle 7664 // and idle->loaded seem impossible probably because 7665 // the output port never finishes re-enabling. 
7666 mCodec->mShutdownInProgress = true; 7667 mCodec->mKeepComponentAllocated = false; 7668 mCodec->changeState(mCodec->mLoadedState); 7669 } 7670 7671 return true; 7672 } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { 7673 if (data2 != (OMX_U32)kPortIndexOutput) { 7674 ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2); 7675 return false; 7676 } 7677 7678 ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str()); 7679 7680 if (mCodec->mExecutingState->active()) { 7681 mCodec->mExecutingState->submitOutputBuffers(); 7682 } 7683 7684 mCodec->changeState(mCodec->mExecutingState); 7685 7686 return true; 7687 } 7688 7689 return false; 7690 } 7691 7692 default: 7693 return false; 7694 } 7695} 7696 7697//////////////////////////////////////////////////////////////////////////////// 7698 7699ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec) 7700 : BaseState(codec), 7701 mComponentNowIdle(false) { 7702} 7703 7704bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) { 7705 bool handled = false; 7706 7707 switch (msg->what()) { 7708 case kWhatFlush: 7709 { 7710 // Don't send me a flush request if you previously wanted me 7711 // to shutdown. 7712 ALOGW("Ignoring flush request in ExecutingToIdleState"); 7713 break; 7714 } 7715 7716 case kWhatShutdown: 7717 { 7718 // We're already doing that... 
7719 7720 handled = true; 7721 break; 7722 } 7723 7724 default: 7725 handled = BaseState::onMessageReceived(msg); 7726 break; 7727 } 7728 7729 return handled; 7730} 7731 7732void ACodec::ExecutingToIdleState::stateEntered() { 7733 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); 7734 7735 mComponentNowIdle = false; 7736 mCodec->mLastOutputFormat.clear(); 7737} 7738 7739bool ACodec::ExecutingToIdleState::onOMXEvent( 7740 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7741 switch (event) { 7742 case OMX_EventCmdComplete: 7743 { 7744 if (data1 != (OMX_U32)OMX_CommandStateSet 7745 || data2 != (OMX_U32)OMX_StateIdle) { 7746 ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)", 7747 asString((OMX_COMMANDTYPE)data1), data1, 7748 asString((OMX_STATETYPE)data2), data2); 7749 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7750 return true; 7751 } 7752 7753 mComponentNowIdle = true; 7754 7755 changeStateIfWeOwnAllBuffers(); 7756 7757 return true; 7758 } 7759 7760 case OMX_EventPortSettingsChanged: 7761 case OMX_EventBufferFlag: 7762 { 7763 // We're shutting down and don't care about this anymore. 7764 return true; 7765 } 7766 7767 default: 7768 return BaseState::onOMXEvent(event, data1, data2); 7769 } 7770} 7771 7772void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { 7773 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { 7774 status_t err = mCodec->mOMX->sendCommand( 7775 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 7776 if (err == OK) { 7777 err = mCodec->freeBuffersOnPort(kPortIndexInput); 7778 status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput); 7779 if (err == OK) { 7780 err = err2; 7781 } 7782 } 7783 7784 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) 7785 && mCodec->mNativeWindow != NULL) { 7786 // We push enough 1x1 blank buffers to ensure that one of 7787 // them has made it to the display. 
This allows the OMX 7788 // component teardown to zero out any protected buffers 7789 // without the risk of scanning out one of those buffers. 7790 pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get()); 7791 } 7792 7793 if (err != OK) { 7794 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7795 return; 7796 } 7797 7798 mCodec->changeState(mCodec->mIdleToLoadedState); 7799 } 7800} 7801 7802void ACodec::ExecutingToIdleState::onInputBufferFilled( 7803 const sp<AMessage> &msg) { 7804 BaseState::onInputBufferFilled(msg); 7805 7806 changeStateIfWeOwnAllBuffers(); 7807} 7808 7809void ACodec::ExecutingToIdleState::onOutputBufferDrained( 7810 const sp<AMessage> &msg) { 7811 BaseState::onOutputBufferDrained(msg); 7812 7813 changeStateIfWeOwnAllBuffers(); 7814} 7815 7816//////////////////////////////////////////////////////////////////////////////// 7817 7818ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) 7819 : BaseState(codec) { 7820} 7821 7822bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { 7823 bool handled = false; 7824 7825 switch (msg->what()) { 7826 case kWhatShutdown: 7827 { 7828 // We're already doing that... 7829 7830 handled = true; 7831 break; 7832 } 7833 7834 case kWhatFlush: 7835 { 7836 // Don't send me a flush request if you previously wanted me 7837 // to shutdown. 
7838 ALOGE("Got flush request in IdleToLoadedState"); 7839 break; 7840 } 7841 7842 default: 7843 handled = BaseState::onMessageReceived(msg); 7844 break; 7845 } 7846 7847 return handled; 7848} 7849 7850void ACodec::IdleToLoadedState::stateEntered() { 7851 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7852} 7853 7854bool ACodec::IdleToLoadedState::onOMXEvent( 7855 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7856 switch (event) { 7857 case OMX_EventCmdComplete: 7858 { 7859 if (data1 != (OMX_U32)OMX_CommandStateSet 7860 || data2 != (OMX_U32)OMX_StateLoaded) { 7861 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7862 asString((OMX_COMMANDTYPE)data1), data1, 7863 asString((OMX_STATETYPE)data2), data2); 7864 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7865 return true; 7866 } 7867 7868 mCodec->changeState(mCodec->mLoadedState); 7869 7870 return true; 7871 } 7872 7873 default: 7874 return BaseState::onOMXEvent(event, data1, data2); 7875 } 7876} 7877 7878//////////////////////////////////////////////////////////////////////////////// 7879 7880ACodec::FlushingState::FlushingState(ACodec *codec) 7881 : BaseState(codec) { 7882} 7883 7884void ACodec::FlushingState::stateEntered() { 7885 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7886 7887 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7888} 7889 7890bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7891 bool handled = false; 7892 7893 switch (msg->what()) { 7894 case kWhatShutdown: 7895 { 7896 mCodec->deferMessage(msg); 7897 break; 7898 } 7899 7900 case kWhatFlush: 7901 { 7902 // We're already doing this right now. 
7903 handled = true; 7904 break; 7905 } 7906 7907 default: 7908 handled = BaseState::onMessageReceived(msg); 7909 break; 7910 } 7911 7912 return handled; 7913} 7914 7915bool ACodec::FlushingState::onOMXEvent( 7916 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7917 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7918 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7919 7920 switch (event) { 7921 case OMX_EventCmdComplete: 7922 { 7923 if (data1 != (OMX_U32)OMX_CommandFlush) { 7924 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7925 asString((OMX_COMMANDTYPE)data1), data1, data2); 7926 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7927 return true; 7928 } 7929 7930 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7931 if (mFlushComplete[data2]) { 7932 ALOGW("Flush already completed for %s port", 7933 data2 == kPortIndexInput ? "input" : "output"); 7934 return true; 7935 } 7936 mFlushComplete[data2] = true; 7937 7938 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7939 changeStateIfWeOwnAllBuffers(); 7940 } 7941 } else if (data2 == OMX_ALL) { 7942 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7943 ALOGW("received flush complete event for OMX_ALL before ports have been" 7944 "flushed (%d/%d)", 7945 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7946 return false; 7947 } 7948 7949 changeStateIfWeOwnAllBuffers(); 7950 } else { 7951 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7952 } 7953 7954 return true; 7955 } 7956 7957 case OMX_EventPortSettingsChanged: 7958 { 7959 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7960 msg->setInt32("type", omx_message::EVENT); 7961 msg->setInt32("node", mCodec->mNode); 7962 msg->setInt32("event", event); 7963 msg->setInt32("data1", data1); 7964 msg->setInt32("data2", data2); 7965 7966 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7967 
mCodec->mComponentName.c_str()); 7968 7969 mCodec->deferMessage(msg); 7970 7971 return true; 7972 } 7973 7974 default: 7975 return BaseState::onOMXEvent(event, data1, data2); 7976 } 7977 7978 return true; 7979} 7980 7981void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7982 BaseState::onOutputBufferDrained(msg); 7983 7984 changeStateIfWeOwnAllBuffers(); 7985} 7986 7987void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7988 BaseState::onInputBufferFilled(msg); 7989 7990 changeStateIfWeOwnAllBuffers(); 7991} 7992 7993void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7994 if (mFlushComplete[kPortIndexInput] 7995 && mFlushComplete[kPortIndexOutput] 7996 && mCodec->allYourBuffersAreBelongToUs()) { 7997 // We now own all buffers except possibly those still queued with 7998 // the native window for rendering. Let's get those back as well. 7999 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 8000 8001 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 8002 8003 sp<AMessage> notify = mCodec->mNotify->dup(); 8004 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 8005 notify->post(); 8006 8007 mCodec->mPortEOS[kPortIndexInput] = 8008 mCodec->mPortEOS[kPortIndexOutput] = false; 8009 8010 mCodec->mInputEOSResult = OK; 8011 8012 if (mCodec->mSkipCutBuffer != NULL) { 8013 mCodec->mSkipCutBuffer->clear(); 8014 } 8015 8016 mCodec->changeState(mCodec->mExecutingState); 8017 } 8018} 8019 8020status_t ACodec::queryCapabilities( 8021 const AString &name, const AString &mime, bool isEncoder, 8022 sp<MediaCodecInfo::Capabilities> *caps) { 8023 (*caps).clear(); 8024 const char *role = getComponentRole(isEncoder, mime.c_str()); 8025 if (role == NULL) { 8026 return BAD_VALUE; 8027 } 8028 8029 OMXClient client; 8030 status_t err = client.connect(); 8031 if (err != OK) { 8032 return err; 8033 } 8034 8035 sp<IOMX> omx = client.interface(); 8036 sp<CodecObserver> observer = new CodecObserver; 8037 
IOMX::node_id node = 0; 8038 8039 err = omx->allocateNode(name.c_str(), observer, NULL, &node); 8040 if (err != OK) { 8041 client.disconnect(); 8042 return err; 8043 } 8044 8045 err = setComponentRole(omx, node, role); 8046 if (err != OK) { 8047 omx->freeNode(node); 8048 client.disconnect(); 8049 return err; 8050 } 8051 8052 sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder(); 8053 bool isVideo = mime.startsWithIgnoreCase("video/"); 8054 8055 if (isVideo) { 8056 OMX_VIDEO_PARAM_PROFILELEVELTYPE param; 8057 InitOMXParams(¶m); 8058 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 8059 8060 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 8061 param.nProfileIndex = index; 8062 status_t err = omx->getParameter( 8063 node, OMX_IndexParamVideoProfileLevelQuerySupported, 8064 ¶m, sizeof(param)); 8065 if (err != OK) { 8066 break; 8067 } 8068 builder->addProfileLevel(param.eProfile, param.eLevel); 8069 8070 if (index == kMaxIndicesToCheck) { 8071 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 8072 name.c_str(), index, 8073 param.eProfile, param.eLevel); 8074 } 8075 } 8076 8077 // Color format query 8078 // return colors in the order reported by the OMX component 8079 // prefix "flexible" standard ones with the flexible equivalent 8080 OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat; 8081 InitOMXParams(&portFormat); 8082 portFormat.nPortIndex = isEncoder ? 
kPortIndexInput : kPortIndexOutput; 8083 Vector<uint32_t> supportedColors; // shadow copy to check for duplicates 8084 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 8085 portFormat.nIndex = index; 8086 status_t err = omx->getParameter( 8087 node, OMX_IndexParamVideoPortFormat, 8088 &portFormat, sizeof(portFormat)); 8089 if (err != OK) { 8090 break; 8091 } 8092 8093 OMX_U32 flexibleEquivalent; 8094 if (isFlexibleColorFormat( 8095 omx, node, portFormat.eColorFormat, false /* usingNativeWindow */, 8096 &flexibleEquivalent)) { 8097 bool marked = false; 8098 for (size_t i = 0; i < supportedColors.size(); ++i) { 8099 if (supportedColors[i] == flexibleEquivalent) { 8100 marked = true; 8101 break; 8102 } 8103 } 8104 if (!marked) { 8105 supportedColors.push(flexibleEquivalent); 8106 builder->addColorFormat(flexibleEquivalent); 8107 } 8108 } 8109 supportedColors.push(portFormat.eColorFormat); 8110 builder->addColorFormat(portFormat.eColorFormat); 8111 8112 if (index == kMaxIndicesToCheck) { 8113 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 8114 name.c_str(), index, 8115 asString(portFormat.eColorFormat), portFormat.eColorFormat); 8116 } 8117 } 8118 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) { 8119 // More audio codecs if they have profiles. 8120 OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param; 8121 InitOMXParams(¶m); 8122 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 8123 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 8124 param.nProfileIndex = index; 8125 status_t err = omx->getParameter( 8126 node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported, 8127 ¶m, sizeof(param)); 8128 if (err != OK) { 8129 break; 8130 } 8131 // For audio, level is ignored. 
8132 builder->addProfileLevel(param.eProfile, 0 /* level */); 8133 8134 if (index == kMaxIndicesToCheck) { 8135 ALOGW("[%s] stopping checking profiles after %u: %x", 8136 name.c_str(), index, 8137 param.eProfile); 8138 } 8139 } 8140 8141 // NOTE: Without Android extensions, OMX does not provide a way to query 8142 // AAC profile support 8143 if (param.nProfileIndex == 0) { 8144 ALOGW("component %s doesn't support profile query.", name.c_str()); 8145 } 8146 } 8147 8148 if (isVideo && !isEncoder) { 8149 native_handle_t *sidebandHandle = NULL; 8150 if (omx->configureVideoTunnelMode( 8151 node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) { 8152 // tunneled playback includes adaptive playback 8153 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback 8154 | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback); 8155 } else if (omx->storeMetaDataInBuffers( 8156 node, kPortIndexOutput, OMX_TRUE) == OK || 8157 omx->prepareForAdaptivePlayback( 8158 node, kPortIndexOutput, OMX_TRUE, 8159 1280 /* width */, 720 /* height */) == OK) { 8160 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback); 8161 } 8162 } 8163 8164 if (isVideo && isEncoder) { 8165 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 8166 InitOMXParams(¶ms); 8167 params.nPortIndex = kPortIndexOutput; 8168 // TODO: should we verify if fallback is supported? 8169 if (omx->getConfig( 8170 node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, 8171 ¶ms, sizeof(params)) == OK) { 8172 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh); 8173 } 8174 } 8175 8176 *caps = builder; 8177 omx->freeNode(node); 8178 client.disconnect(); 8179 return OK; 8180} 8181 8182// These are supposed be equivalent to the logic in 8183// "audio_channel_out_mask_from_count". 
8184//static 8185status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 8186 switch (numChannels) { 8187 case 1: 8188 map[0] = OMX_AUDIO_ChannelCF; 8189 break; 8190 case 2: 8191 map[0] = OMX_AUDIO_ChannelLF; 8192 map[1] = OMX_AUDIO_ChannelRF; 8193 break; 8194 case 3: 8195 map[0] = OMX_AUDIO_ChannelLF; 8196 map[1] = OMX_AUDIO_ChannelRF; 8197 map[2] = OMX_AUDIO_ChannelCF; 8198 break; 8199 case 4: 8200 map[0] = OMX_AUDIO_ChannelLF; 8201 map[1] = OMX_AUDIO_ChannelRF; 8202 map[2] = OMX_AUDIO_ChannelLR; 8203 map[3] = OMX_AUDIO_ChannelRR; 8204 break; 8205 case 5: 8206 map[0] = OMX_AUDIO_ChannelLF; 8207 map[1] = OMX_AUDIO_ChannelRF; 8208 map[2] = OMX_AUDIO_ChannelCF; 8209 map[3] = OMX_AUDIO_ChannelLR; 8210 map[4] = OMX_AUDIO_ChannelRR; 8211 break; 8212 case 6: 8213 map[0] = OMX_AUDIO_ChannelLF; 8214 map[1] = OMX_AUDIO_ChannelRF; 8215 map[2] = OMX_AUDIO_ChannelCF; 8216 map[3] = OMX_AUDIO_ChannelLFE; 8217 map[4] = OMX_AUDIO_ChannelLR; 8218 map[5] = OMX_AUDIO_ChannelRR; 8219 break; 8220 case 7: 8221 map[0] = OMX_AUDIO_ChannelLF; 8222 map[1] = OMX_AUDIO_ChannelRF; 8223 map[2] = OMX_AUDIO_ChannelCF; 8224 map[3] = OMX_AUDIO_ChannelLFE; 8225 map[4] = OMX_AUDIO_ChannelLR; 8226 map[5] = OMX_AUDIO_ChannelRR; 8227 map[6] = OMX_AUDIO_ChannelCS; 8228 break; 8229 case 8: 8230 map[0] = OMX_AUDIO_ChannelLF; 8231 map[1] = OMX_AUDIO_ChannelRF; 8232 map[2] = OMX_AUDIO_ChannelCF; 8233 map[3] = OMX_AUDIO_ChannelLFE; 8234 map[4] = OMX_AUDIO_ChannelLR; 8235 map[5] = OMX_AUDIO_ChannelRR; 8236 map[6] = OMX_AUDIO_ChannelLS; 8237 map[7] = OMX_AUDIO_ChannelRS; 8238 break; 8239 default: 8240 return -EINVAL; 8241 } 8242 8243 return OK; 8244} 8245 8246} // namespace android 8247