// ACodec.cpp — AOSP revision bb88069079c3e406d4a72646fc9d65d2e802df90
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "omx/OMXUtils.h" 57 58namespace android { 59 60enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62}; 63 64// OMX errors are directly mapped into 
status_t range if 65// there is no corresponding MediaError status code. 66// Use the statusFromOMXError(int32_t omxError) function. 67// 68// Currently this is a direct map. 69// See frameworks/native/include/media/openmax/OMX_Core.h 70// 71// Vendor OMX errors from 0x90000000 - 0x9000FFFF 72// Extension OMX errors from 0x8F000000 - 0x90000000 73// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 74// 75 76// returns true if err is a recognized OMX error code. 77// as OMX error is OMX_S32, this is an int32_t type 78static inline bool isOMXError(int32_t err) { 79 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 80} 81 82// converts an OMX error to a status_t 83static inline status_t statusFromOMXError(int32_t omxError) { 84 switch (omxError) { 85 case OMX_ErrorInvalidComponentName: 86 case OMX_ErrorComponentNotFound: 87 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 88 default: 89 return isOMXError(omxError) ? omxError : 0; // no translation required 90 } 91} 92 93// checks and converts status_t to a non-side-effect status_t 94static inline status_t makeNoSideEffectStatus(status_t err) { 95 switch (err) { 96 // the following errors have side effects and may come 97 // from other code modules. Remap for safety reasons. 
98 case INVALID_OPERATION: 99 case DEAD_OBJECT: 100 return UNKNOWN_ERROR; 101 default: 102 return err; 103 } 104} 105 106struct MessageList : public RefBase { 107 MessageList() { 108 } 109 virtual ~MessageList() { 110 } 111 std::list<sp<AMessage> > &getList() { return mList; } 112private: 113 std::list<sp<AMessage> > mList; 114 115 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 116}; 117 118static sp<DataConverter> getCopyConverter() { 119 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited 120 static sp<DataConverter> sCopyConverter; // zero-inited 121 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); }); 122 return sCopyConverter; 123} 124 125struct CodecObserver : public BnOMXObserver { 126 CodecObserver() {} 127 128 void setNotificationMessage(const sp<AMessage> &msg) { 129 mNotify = msg; 130 } 131 132 // from IOMXObserver 133 virtual void onMessages(const std::list<omx_message> &messages) { 134 if (messages.empty()) { 135 return; 136 } 137 138 sp<AMessage> notify = mNotify->dup(); 139 bool first = true; 140 sp<MessageList> msgList = new MessageList(); 141 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 142 it != messages.cend(); ++it) { 143 const omx_message &omx_msg = *it; 144 if (first) { 145 notify->setInt32("node", omx_msg.node); 146 first = false; 147 } 148 149 sp<AMessage> msg = new AMessage; 150 msg->setInt32("type", omx_msg.type); 151 switch (omx_msg.type) { 152 case omx_message::EVENT: 153 { 154 msg->setInt32("event", omx_msg.u.event_data.event); 155 msg->setInt32("data1", omx_msg.u.event_data.data1); 156 msg->setInt32("data2", omx_msg.u.event_data.data2); 157 break; 158 } 159 160 case omx_message::EMPTY_BUFFER_DONE: 161 { 162 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 163 msg->setInt32("fence_fd", omx_msg.fenceFd); 164 break; 165 } 166 167 case omx_message::FILL_BUFFER_DONE: 168 { 169 msg->setInt32( 170 "buffer", omx_msg.u.extended_buffer_data.buffer); 171 msg->setInt32( 172 "range_offset", 173 
omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        // Deliver the whole batch in a single post to minimize looper traffic.
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for all states of ACodec's hierarchical state machine.
// Provides default handling for looper messages and OMX callbacks; concrete
// states override getPortMode()/onOMXEvent()/etc. to specialize behavior.
struct ACodec::BaseState : public AState {
    explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    // How buffers returned by the component/client are treated in this state.
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;  // non-owning back-pointer to the enclosing ACodec

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts the stored notification when the watched binder (the OMX service --
// TODO confirm against registration site) dies, letting ACodec react to a
// mediaserver death.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    explicit DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

// State before a component has been allocated; handles setup and component
// allocation requests.
struct ACodec::UninitializedState : public ACodec::BaseState {
    explicit UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

// Component allocated but not yet started (OMX "Loaded"); handles
// configuration, input-surface creation, and start/shutdown requests.
struct ACodec::LoadedState : public ACodec::BaseState {
    explicit LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const
sp<AMessage> &msg);
    void onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state (per its name, Loaded -> Idle): buffers are being
// allocated while waiting for the component's state change to complete.
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    explicit LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state (per its name, Idle -> Executing): waiting for the
// component to acknowledge the transition to the OMX Executing state.
struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    explicit IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Steady state: component is running and buffers flow between the component,
// the client and (for video) the native window.
struct ACodec::ExecutingState : public ACodec::BaseState {
    explicit ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    bool mActive;  // set while buffers are circulating (see active())

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Entered when the component signals an output port settings change (e.g.
// resolution change -- TODO confirm trigger set); output buffers are
// reallocated before returning to ExecutingState.
struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    explicit OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state (per its name, Executing -> Idle) during shutdown;
// waits until all buffers are returned before continuing teardown.
struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    explicit ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;  // component has confirmed the Idle transition

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state (per its name, Idle -> Loaded), the final step of
// component teardown before returning to LoadedState/UninitializedState.
struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    explicit IdleToLoadedState(ACodec
*codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Entered while flushing one or both ports; tracks per-port flush completion
// and collects all buffers before resuming.
struct ACodec::FlushingState : public ACodec::BaseState {
    explicit FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    bool mFlushComplete[2];  // indexed by kPortIndexInput/kPortIndexOutput

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Records fenceFd as a write fence on this buffer. If an unhandled fence is
// still present it is overwritten (and warned about -- the old fd is not
// closed here, so this looks like a potential fd leak; see ALOGW).
void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = false;
}

// Records fenceFd as a read fence on this buffer; same overwrite caveat as
// setWriteFence().
void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = true;
}

// Debug check: logs if the stored fence is about to be consumed with the
// opposite (read/write) semantics than it was stored with.
void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
    if (mFenceFd >= 0 && mIsReadFence) {
        ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
    }
}

void ACodec::BufferInfo::checkReadFence(const char *dbg) {
    if (mFenceFd >= 0 && !mIsReadFence) {
        ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
    }
}

////////////////////////////////////////////////////////////////////////////////

// Constructs the codec in UninitializedState with all configuration reset to
// "unset" sentinels (-1ll for durations, 0/false elsewhere) and instantiates
// every state object of the state machine up front.
ACodec::ACodec()
    : mSampleRate(0),
      mQuirks(0),
      mNode(0),
      mUsingNativeWindow(false),
      mNativeWindowUsageBits(0),
      mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
      mIsVideo(false),
      mIsEncoder(false),
      mFatalError(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mIsLegacyVP9Decoder(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mInputMetadataType(kMetadataBufferTypeInvalid),
      mOutputMetadataType(kMetadataBufferTypeInvalid),
      mLegacyAdaptiveExperiment(false),
      mMetadataBuffersToSubmit(0),
      mNumUndequeuedBuffers(0),
      mRepeatFrameDelayUs(-1ll),
      mMaxPtsGapUs(-1ll),
      mMaxFps(-1),
      mTimePerFrameUs(-1ll),
      mTimePerCaptureUs(-1ll),
      mCreateInputBuffersSuspended(false),
      mTunneled(false),
      mDescribeColorAspectsIndex((OMX_INDEXTYPE)0),
      mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) {
    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));

    changeState(mUninitializedState);
}

ACodec::~ACodec() {
}

// Stores the message template through which all codec notifications are
// delivered (dup()'ed and posted per event).
void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}

// All of the initiate*/signal* entry points below are asynchronous: they post
// a message to this ACodec's looper; the current state handles it there.
void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(this);
    msg->post();
}

void ACodec::signalSetParameters(const sp<AMessage> &params) {
    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
    msg->setMessage("params", params);
    msg->post();
}

void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent);
    msg->setTarget(this);
    msg->post();
}

void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(this);
    msg->post();
}

// Synchronous: blocks until the looper thread has processed the surface
// change and returns its status (the "err" payload of the response).
status_t ACodec::setSurface(const sp<Surface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
    msg->setObject("surface", surface);

    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err == OK) {
        (void)response->findInt32("err", &err);
    }
    return err;
}

void ACodec::initiateCreateInputSurface() {
    (new AMessage(kWhatCreateInputSurface, this))->post();
}

void ACodec::initiateSetInputSurface(
        const sp<PersistentSurface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
    msg->setObject("input-surface", surface);
    msg->post();
}

void ACodec::signalEndOfInputStream() {
    (new AMessage(kWhatSignalEndOfInputStream, this))->post();
}

void ACodec::initiateStart() {
    (new AMessage(kWhatStart, this))->post();
}

void ACodec::signalFlush() {
    ALOGV("[%s] signalFlush", mComponentName.c_str());
    (new AMessage(kWhatFlush, this))->post();
}

void ACodec::signalResume() {
    (new AMessage(kWhatResume, this))->post();
}

void ACodec::initiateShutdown(bool keepComponentAllocated) {
    sp<AMessage> msg = new AMessage(kWhatShutdown, this);
    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
    msg->post();
    if (!keepComponentAllocated) {
        // ensure shutdown completes in 3 seconds
        (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
    }
}

void ACodec::signalRequestIDRFrame() {
    (new AMessage(kWhatRequestIDRFrame, this))->post();
}

// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port. For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetadataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
    }
}

// Switches the output to a new Surface, possibly while the codec is running.
// Validates that the switch is legal (same usage bits, enough buffer head-
// room), migrates/attaches the already-allocated output buffers to the new
// surface, and finally adopts it. Returns OK on success or a status_t error;
// on mid-migration failure the codec may be left partially attached to the
// new surface -- TODO confirm caller treats this as fatal.
status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
    // allow keeping unset surface
    if (surface == NULL) {
        if (mNativeWindow != NULL) {
            ALOGW("cannot unset a surface");
            return INVALID_OPERATION;
        }
        return OK;
    }

    // cannot switch from bytebuffers to surface
    if (mNativeWindow == NULL) {
        ALOGW("component was not configured with a surface");
        return INVALID_OPERATION;
    }

    ANativeWindow *nativeWindow = surface.get();
    // if we have not yet started the codec, we can simply set the native window
    if (mBuffers[kPortIndexInput].size() == 0) {
        mNativeWindow = surface;
        return OK;
    }

    // we do not support changing a tunneled surface after start
    if (mTunneled) {
        ALOGW("cannot change tunneled surface");
        return INVALID_OPERATION;
    }

    int usageBits = 0;
    // no need to reconnect as we will not dequeue all buffers
    status_t err = setupNativeWindowSizeFormatAndUsage(
            nativeWindow, &usageBits,
            !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */);
    if (err != OK) {
        return err;
    }

    int ignoredFlags = kVideoGrallocUsage;
    // New output surface is not allowed to add new usage flag except ignored ones.
    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
        return BAD_VALUE;
    }

    // get min undequeued count. We cannot switch to a surface that has a higher
    // undequeued count than we allocated.
    int minUndequeuedBuffers = 0;
    err = nativeWindow->query(
            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &minUndequeuedBuffers);
    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }
    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
                minUndequeuedBuffers, mNumUndequeuedBuffers);
        return BAD_VALUE;
    }

    // we cannot change the number of output buffers while OMX is running
    // set up surface to the same count
    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
    ALOGV("setting up surface for %zu buffers", buffers.size());

    err = native_window_set_buffer_count(nativeWindow, buffers.size());
    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    // need to enable allocation when attaching
    surface->getIGraphicBufferProducer()->allowAllocation(true);

    // for meta data mode, we move dequeued buffers to the new surface.
// for non-meta mode, we must move all registered buffers
    for (size_t i = 0; i < buffers.size(); ++i) {
        const BufferInfo &info = buffers[i];
        // skip undequeued buffers for meta data mode
        if (storingMetadataInDecodedBuffers()
                && !mLegacyAdaptiveExperiment
                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("skipping buffer");
            continue;
        }
        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());

        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
        if (err != OK) {
            ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
                    info.mGraphicBuffer->getNativeBuffer(),
                    strerror(-err), -err);
            return err;
        }
    }

    // cancel undequeued buffers to new surface
    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            BufferInfo &info = buffers.editItemAt(i);
            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                err = nativeWindow->cancelBuffer(
                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
                // fence ownership transferred to cancelBuffer above
                info.mFenceFd = -1;
                if (err != OK) {
                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
                            info.mGraphicBuffer->getNativeBuffer(),
                            strerror(-err), -err);
                    return err;
                }
            }
        }
        // disallow further allocation
        (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // push blank buffers to previous window if requested
    if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
        pushBlankBuffersToNativeWindow(mNativeWindow.get());
    }

    // adopt the new surface only after all migrations above succeeded
    mNativeWindow = nativeWindow;
    mNativeWindowUsageBits = usageBits;
    return OK;
}

// Allocates all buffers for one port. Output+surface goes through the native
// window paths; otherwise buffers come from a MemoryDealer-backed shared
// memory arena and are registered with the component via allocate/useBuffer.
// Notifies the client (kWhatBuffersAllocated) with the resulting port layout.
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    // must not be called twice for a port without freeing first
    CHECK(mDealer[portIndex] == NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (storingMetadataInDecodedBuffers()) {
            err = allocateOutputMetadataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            MetadataBufferType type =
                portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
            size_t bufSize = def.nBufferSize;
            // metadata modes use fixed-size metadata records, not frame data
            if (type == kMetadataBufferTypeANWBuffer) {
                bufSize = sizeof(VideoNativeMetadata);
            } else if (type == kMetadataBufferTypeNativeHandleSource) {
                bufSize = sizeof(VideoNativeHandleMetadata);
            }

            // If using gralloc or native source input metadata buffers, allocate largest
            // metadata size as we prefer to generate native source metadata, but component
            // may require gralloc source. For camera source, allocate at least enough
            // size for native metadata buffers.
size_t allottedSize = bufSize;  // size promised to the component (pre-adjustment)
            if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) {
                bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
                bufSize = max(bufSize, sizeof(VideoNativeMetadata));
            }

            size_t conversionBufferSize = 0;

            sp<DataConverter> converter = mConverter[portIndex];
            if (converter != NULL) {
                // here we assume sane conversions of max 4:1, so result fits in int32
                if (portIndex == kPortIndexInput) {
                    conversionBufferSize = converter->sourceSize(bufSize);
                } else {
                    conversionBufferSize = converter->targetSize(bufSize);
                }
            }

            size_t alignment = MemoryDealer::getAllocationAlignment();

            ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
                    portIndex == kPortIndexInput ? "input" : "output");

            // verify buffer sizes to avoid overflow in align()
            if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            // don't modify bufSize as OMX may not expect it to increase after negotiation
            size_t alignedSize = align(bufSize, alignment);
            size_t alignedConvSize = align(conversionBufferSize, alignment);
            // guard the multiplication below against size_t overflow
            if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
                if (mem == NULL || mem->pointer() == NULL) {
                    return NO_MEMORY;
                }

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;
                info.mFenceFd = -1;
                info.mRenderInfo = NULL;
                info.mNativeHandle = NULL;

                uint32_t requiresAllocateBufferBit =
                    (portIndex == kPortIndexInput)
                        ? kRequiresAllocateBufferOnInputPorts
                        : kRequiresAllocateBufferOnOutputPorts;

                if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
                    // secure input: the shared-memory buffer is discarded and a
                    // secure buffer is allocated by the component instead
                    mem.clear();

                    void *ptr = NULL;
                    sp<NativeHandle> native_handle;
                    err = mOMX->allocateSecureBuffer(
                            mNode, portIndex, bufSize, &info.mBufferID,
                            &ptr, &native_handle);

                    // TRICKY: this representation is unorthodox, but ACodec requires
                    // an ABuffer with a proper size to validate range offsets and lengths.
                    // Since mData is never referenced for secure input, it is used to store
                    // either the pointer to the secure buffer, or the opaque handle as on
                    // some devices ptr is actually an opaque handle, not a pointer.

                    // TRICKY2: use native handle as the base of the ABuffer if received one,
                    // because Widevine source only receives these base addresses.
                    const native_handle_t *native_handle_ptr =
                        native_handle == NULL ? NULL : native_handle->handle();
                    info.mData = new ABuffer(
                            ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize);
                    info.mNativeHandle = native_handle;
                    info.mCodecData = info.mData;
                } else if (mQuirks & requiresAllocateBufferBit) {
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID, allottedSize);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
                }

                if (mem != NULL) {
                    info.mCodecData = new ABuffer(mem->pointer(), bufSize);
                    info.mCodecRef = mem;

                    if (type == kMetadataBufferTypeANWBuffer) {
                        // start with no pending fence on the metadata record
                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                    }

                    // if we require conversion, allocate conversion buffer for client use;
                    // otherwise, reuse codec buffer
                    if (mConverter[portIndex] != NULL) {
                        CHECK_GT(conversionBufferSize, (size_t)0);
                        mem = mDealer[portIndex]->allocate(conversionBufferSize);
                        if (mem == NULL || mem->pointer() == NULL) {
                            return NO_MEMORY;
                        }
                        info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
                        info.mMemRef = mem;
                    } else {
                        info.mData = info.mCodecData;
                        info.mMemRef = info.mCodecRef;
                    }
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    // announce the allocated buffers to the client
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
    }

notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Configures the native window (size, format, rotation, usage) to match the
// component's current output port definition. The combined gralloc usage bits
// are returned through *finalUsage.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */,
        bool reconnect) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    // reset cached window geometry; it will be re-established on next use
    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage,
            reconnect);
}

// Negotiates the output buffer count between the component and the native
// window and applies it on both sides. Outputs the agreed bufferCount/
// bufferSize and the window's min-undequeued requirement (adjusted by the
// extra buffers actually granted).
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2.
try to allocate two (2) additional buffers to reduce starvation from 1054 // the consumer 1055 // plus an extra buffer to account for incorrect minUndequeuedBufs 1056 for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) { 1057 OMX_U32 newBufferCount = 1058 def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers; 1059 def.nBufferCountActual = newBufferCount; 1060 err = mOMX->setParameter( 1061 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1062 1063 if (err == OK) { 1064 *minUndequeuedBuffers += extraBuffers; 1065 break; 1066 } 1067 1068 ALOGW("[%s] setting nBufferCountActual to %u failed: %d", 1069 mComponentName.c_str(), newBufferCount, err); 1070 /* exit condition */ 1071 if (extraBuffers == 0) { 1072 return err; 1073 } 1074 } 1075 1076 err = native_window_set_buffer_count( 1077 mNativeWindow.get(), def.nBufferCountActual); 1078 1079 if (err != 0) { 1080 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 1081 -err); 1082 return err; 1083 } 1084 1085 *bufferCount = def.nBufferCountActual; 1086 *bufferSize = def.nBufferSize; 1087 return err; 1088} 1089 1090status_t ACodec::allocateOutputBuffersFromNativeWindow() { 1091 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1092 status_t err = configureOutputBuffersFromNativeWindow( 1093 &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */); 1094 if (err != 0) 1095 return err; 1096 mNumUndequeuedBuffers = minUndequeuedBuffers; 1097 1098 if (!storingMetadataInDecodedBuffers()) { 1099 static_cast<Surface*>(mNativeWindow.get()) 1100 ->getIGraphicBufferProducer()->allowAllocation(true); 1101 } 1102 1103 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1104 "output port", 1105 mComponentName.c_str(), bufferCount, bufferSize); 1106 1107 // Dequeue buffers and send them to OMX 1108 for (OMX_U32 i = 0; i < bufferCount; i++) { 1109 ANativeWindowBuffer *buf; 1110 int fenceFd; 1111 err = 
mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1112 if (err != 0) { 1113 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1114 break; 1115 } 1116 1117 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1118 BufferInfo info; 1119 info.mStatus = BufferInfo::OWNED_BY_US; 1120 info.mFenceFd = fenceFd; 1121 info.mIsReadFence = false; 1122 info.mRenderInfo = NULL; 1123 info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */); 1124 info.mCodecData = info.mData; 1125 info.mGraphicBuffer = graphicBuffer; 1126 mBuffers[kPortIndexOutput].push(info); 1127 1128 IOMX::buffer_id bufferId; 1129 err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer, 1130 &bufferId); 1131 if (err != 0) { 1132 ALOGE("registering GraphicBuffer %u with OMX IL component failed: " 1133 "%d", i, err); 1134 break; 1135 } 1136 1137 mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId; 1138 1139 ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)", 1140 mComponentName.c_str(), 1141 bufferId, graphicBuffer.get()); 1142 } 1143 1144 OMX_U32 cancelStart; 1145 OMX_U32 cancelEnd; 1146 1147 if (err != 0) { 1148 // If an error occurred while dequeuing we need to cancel any buffers 1149 // that were dequeued. 1150 cancelStart = 0; 1151 cancelEnd = mBuffers[kPortIndexOutput].size(); 1152 } else { 1153 // Return the required minimum undequeued buffers to the native window. 
1154 cancelStart = bufferCount - minUndequeuedBuffers; 1155 cancelEnd = bufferCount; 1156 } 1157 1158 for (OMX_U32 i = cancelStart; i < cancelEnd; i++) { 1159 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1160 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1161 status_t error = cancelBufferToNativeWindow(info); 1162 if (err == 0) { 1163 err = error; 1164 } 1165 } 1166 } 1167 1168 if (!storingMetadataInDecodedBuffers()) { 1169 static_cast<Surface*>(mNativeWindow.get()) 1170 ->getIGraphicBufferProducer()->allowAllocation(false); 1171 } 1172 1173 return err; 1174} 1175 1176status_t ACodec::allocateOutputMetadataBuffers() { 1177 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1178 status_t err = configureOutputBuffersFromNativeWindow( 1179 &bufferCount, &bufferSize, &minUndequeuedBuffers, 1180 mLegacyAdaptiveExperiment /* preregister */); 1181 if (err != 0) 1182 return err; 1183 mNumUndequeuedBuffers = minUndequeuedBuffers; 1184 1185 ALOGV("[%s] Allocating %u meta buffers on output port", 1186 mComponentName.c_str(), bufferCount); 1187 1188 size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ? 
1189 sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata); 1190 size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment()); 1191 mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec"); 1192 1193 // Dequeue buffers and send them to OMX 1194 for (OMX_U32 i = 0; i < bufferCount; i++) { 1195 BufferInfo info; 1196 info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1197 info.mFenceFd = -1; 1198 info.mRenderInfo = NULL; 1199 info.mGraphicBuffer = NULL; 1200 info.mDequeuedAt = mDequeueCounter; 1201 1202 sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize); 1203 if (mem == NULL || mem->pointer() == NULL) { 1204 return NO_MEMORY; 1205 } 1206 if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1207 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 1208 } 1209 info.mData = new ABuffer(mem->pointer(), mem->size()); 1210 info.mMemRef = mem; 1211 info.mCodecData = info.mData; 1212 info.mCodecRef = mem; 1213 1214 // we use useBuffer for metadata regardless of quirks 1215 err = mOMX->useBuffer( 1216 mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size()); 1217 mBuffers[kPortIndexOutput].push(info); 1218 1219 ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)", 1220 mComponentName.c_str(), info.mBufferID, mem->pointer()); 1221 } 1222 1223 if (mLegacyAdaptiveExperiment) { 1224 // preallocate and preregister buffers 1225 static_cast<Surface *>(mNativeWindow.get()) 1226 ->getIGraphicBufferProducer()->allowAllocation(true); 1227 1228 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1229 "output port", 1230 mComponentName.c_str(), bufferCount, bufferSize); 1231 1232 // Dequeue buffers then cancel them all 1233 for (OMX_U32 i = 0; i < bufferCount; i++) { 1234 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1235 1236 ANativeWindowBuffer *buf; 1237 int fenceFd; 1238 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1239 if (err != 0) { 
1240 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1241 break; 1242 } 1243 1244 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1245 mOMX->updateGraphicBufferInMeta( 1246 mNode, kPortIndexOutput, graphicBuffer, info->mBufferID); 1247 info->mStatus = BufferInfo::OWNED_BY_US; 1248 info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy"); 1249 info->mGraphicBuffer = graphicBuffer; 1250 } 1251 1252 for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) { 1253 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1254 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1255 status_t error = cancelBufferToNativeWindow(info); 1256 if (err == OK) { 1257 err = error; 1258 } 1259 } 1260 } 1261 1262 static_cast<Surface*>(mNativeWindow.get()) 1263 ->getIGraphicBufferProducer()->allowAllocation(false); 1264 } 1265 1266 mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers; 1267 return err; 1268} 1269 1270status_t ACodec::submitOutputMetadataBuffer() { 1271 CHECK(storingMetadataInDecodedBuffers()); 1272 if (mMetadataBuffersToSubmit == 0) 1273 return OK; 1274 1275 BufferInfo *info = dequeueBufferFromNativeWindow(); 1276 if (info == NULL) { 1277 return ERROR_IO; 1278 } 1279 1280 ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p", 1281 mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get()); 1282 1283 --mMetadataBuffersToSubmit; 1284 info->checkWriteFence("submitOutputMetadataBuffer"); 1285 status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd); 1286 info->mFenceFd = -1; 1287 if (err == OK) { 1288 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 1289 } 1290 1291 return err; 1292} 1293 1294status_t ACodec::waitForFence(int fd, const char *dbg ) { 1295 status_t res = OK; 1296 if (fd >= 0) { 1297 sp<Fence> fence = new Fence(fd); 1298 res = fence->wait(IOMX::kFenceTimeoutMs); 1299 ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg); 1300 } 1301 return res; 1302} 
1303 1304// static 1305const char *ACodec::_asString(BufferInfo::Status s) { 1306 switch (s) { 1307 case BufferInfo::OWNED_BY_US: return "OUR"; 1308 case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT"; 1309 case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM"; 1310 case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM"; 1311 case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE"; 1312 case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED"; 1313 default: return "?"; 1314 } 1315} 1316 1317void ACodec::dumpBuffers(OMX_U32 portIndex) { 1318 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 1319 ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(), 1320 portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size()); 1321 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1322 const BufferInfo &info = mBuffers[portIndex][i]; 1323 ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u", 1324 i, info.mBufferID, info.mGraphicBuffer.get(), 1325 info.mGraphicBuffer == NULL ? 
NULL : info.mGraphicBuffer->getNativeBuffer(), 1326 _asString(info.mStatus), info.mStatus, info.mDequeuedAt); 1327 } 1328} 1329 1330status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { 1331 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 1332 1333 ALOGV("[%s] Calling cancelBuffer on buffer %u", 1334 mComponentName.c_str(), info->mBufferID); 1335 1336 info->checkWriteFence("cancelBufferToNativeWindow"); 1337 int err = mNativeWindow->cancelBuffer( 1338 mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 1339 info->mFenceFd = -1; 1340 1341 ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window", 1342 mComponentName.c_str(), info->mBufferID); 1343 // change ownership even if cancelBuffer fails 1344 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1345 1346 return err; 1347} 1348 1349void ACodec::updateRenderInfoForDequeuedBuffer( 1350 ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) { 1351 1352 info->mRenderInfo = 1353 mRenderTracker.updateInfoForDequeuedBuffer( 1354 buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]); 1355 1356 // check for any fences already signaled 1357 notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo); 1358} 1359 1360void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 1361 if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) { 1362 mRenderTracker.dumpRenderQueue(); 1363 } 1364} 1365 1366void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) { 1367 sp<AMessage> msg = mNotify->dup(); 1368 msg->setInt32("what", CodecBase::kWhatOutputFramesRendered); 1369 std::list<FrameRenderTracker::Info> done = 1370 mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete); 1371 1372 // unlink untracked frames 1373 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin(); 1374 it != done.cend(); ++it) { 1375 ssize_t index = it->getIndex(); 1376 if (index >= 0 && (size_t)index < 
mBuffers[kPortIndexOutput].size()) { 1377 mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL; 1378 } else if (index >= 0) { 1379 // THIS SHOULD NEVER HAPPEN 1380 ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size()); 1381 } 1382 } 1383 1384 if (MediaCodec::CreateFramesRenderedMessage(done, msg)) { 1385 msg->post(); 1386 } 1387} 1388 1389ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { 1390 ANativeWindowBuffer *buf; 1391 CHECK(mNativeWindow.get() != NULL); 1392 1393 if (mTunneled) { 1394 ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel" 1395 " video playback mode mode!"); 1396 return NULL; 1397 } 1398 1399 if (mFatalError) { 1400 ALOGW("not dequeuing from native window due to fatal error"); 1401 return NULL; 1402 } 1403 1404 int fenceFd = -1; 1405 do { 1406 status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1407 if (err != 0) { 1408 ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err); 1409 return NULL; 1410 } 1411 1412 bool stale = false; 1413 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1414 i--; 1415 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1416 1417 if (info->mGraphicBuffer != NULL && 1418 info->mGraphicBuffer->handle == buf->handle) { 1419 // Since consumers can attach buffers to BufferQueues, it is possible 1420 // that a known yet stale buffer can return from a surface that we 1421 // once used. We can simply ignore this as we have already dequeued 1422 // this buffer properly. NOTE: this does not eliminate all cases, 1423 // e.g. it is possible that we have queued the valid buffer to the 1424 // NW, and a stale copy of the same buffer gets dequeued - which will 1425 // be treated as the valid buffer by ACodec. 1426 if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 1427 ALOGI("dequeued stale buffer %p. 
discarding", buf); 1428 stale = true; 1429 break; 1430 } 1431 1432 ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer()); 1433 info->mStatus = BufferInfo::OWNED_BY_US; 1434 info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow"); 1435 updateRenderInfoForDequeuedBuffer(buf, fenceFd, info); 1436 return info; 1437 } 1438 } 1439 1440 // It is also possible to receive a previously unregistered buffer 1441 // in non-meta mode. These should be treated as stale buffers. The 1442 // same is possible in meta mode, in which case, it will be treated 1443 // as a normal buffer, which is not desirable. 1444 // TODO: fix this. 1445 if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) { 1446 ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf); 1447 stale = true; 1448 } 1449 if (stale) { 1450 // TODO: detach stale buffer, but there is no API yet to do it. 1451 buf = NULL; 1452 } 1453 } while (buf == NULL); 1454 1455 // get oldest undequeued buffer 1456 BufferInfo *oldest = NULL; 1457 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1458 i--; 1459 BufferInfo *info = 1460 &mBuffers[kPortIndexOutput].editItemAt(i); 1461 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW && 1462 (oldest == NULL || 1463 // avoid potential issues from counter rolling over 1464 mDequeueCounter - info->mDequeuedAt > 1465 mDequeueCounter - oldest->mDequeuedAt)) { 1466 oldest = info; 1467 } 1468 } 1469 1470 // it is impossible dequeue a buffer when there are no buffers with ANW 1471 CHECK(oldest != NULL); 1472 // it is impossible to dequeue an unknown buffer in non-meta mode, as the 1473 // while loop above does not complete 1474 CHECK(storingMetadataInDecodedBuffers()); 1475 1476 // discard buffer in LRU info and replace with new buffer 1477 oldest->mGraphicBuffer = new GraphicBuffer(buf, false); 1478 oldest->mStatus = BufferInfo::OWNED_BY_US; 1479 oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest"); 1480 
mRenderTracker.untrackFrame(oldest->mRenderInfo); 1481 oldest->mRenderInfo = NULL; 1482 1483 mOMX->updateGraphicBufferInMeta( 1484 mNode, kPortIndexOutput, oldest->mGraphicBuffer, 1485 oldest->mBufferID); 1486 1487 if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) { 1488 VideoGrallocMetadata *grallocMeta = 1489 reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base()); 1490 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1491 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1492 mDequeueCounter - oldest->mDequeuedAt, 1493 (void *)(uintptr_t)grallocMeta->pHandle, 1494 oldest->mGraphicBuffer->handle, oldest->mData->base()); 1495 } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1496 VideoNativeMetadata *nativeMeta = 1497 reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base()); 1498 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1499 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1500 mDequeueCounter - oldest->mDequeuedAt, 1501 (void *)(uintptr_t)nativeMeta->pBuffer, 1502 oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base()); 1503 } 1504 1505 updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest); 1506 return oldest; 1507} 1508 1509status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) { 1510 status_t err = OK; 1511 for (size_t i = mBuffers[portIndex].size(); i > 0;) { 1512 i--; 1513 status_t err2 = freeBuffer(portIndex, i); 1514 if (err == OK) { 1515 err = err2; 1516 } 1517 } 1518 1519 // clear mDealer even on an error 1520 mDealer[portIndex].clear(); 1521 return err; 1522} 1523 1524status_t ACodec::freeOutputBuffersNotOwnedByComponent() { 1525 status_t err = OK; 1526 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1527 i--; 1528 BufferInfo *info = 1529 &mBuffers[kPortIndexOutput].editItemAt(i); 1530 1531 // At this time some buffers may still be with the component 1532 // or being drained. 
1533 if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT && 1534 info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { 1535 status_t err2 = freeBuffer(kPortIndexOutput, i); 1536 if (err == OK) { 1537 err = err2; 1538 } 1539 } 1540 } 1541 1542 return err; 1543} 1544 1545status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) { 1546 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1547 status_t err = OK; 1548 1549 // there should not be any fences in the metadata 1550 MetadataBufferType type = 1551 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 1552 if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL 1553 && info->mData->size() >= sizeof(VideoNativeMetadata)) { 1554 int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd; 1555 if (fenceFd >= 0) { 1556 ALOGW("unreleased fence (%d) in %s metadata buffer %zu", 1557 fenceFd, portIndex == kPortIndexInput ? "input" : "output", i); 1558 } 1559 } 1560 1561 switch (info->mStatus) { 1562 case BufferInfo::OWNED_BY_US: 1563 if (portIndex == kPortIndexOutput && mNativeWindow != NULL) { 1564 (void)cancelBufferToNativeWindow(info); 1565 } 1566 // fall through 1567 1568 case BufferInfo::OWNED_BY_NATIVE_WINDOW: 1569 err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID); 1570 break; 1571 1572 default: 1573 ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus); 1574 err = FAILED_TRANSACTION; 1575 break; 1576 } 1577 1578 if (info->mFenceFd >= 0) { 1579 ::close(info->mFenceFd); 1580 } 1581 1582 if (portIndex == kPortIndexOutput) { 1583 mRenderTracker.untrackFrame(info->mRenderInfo, i); 1584 info->mRenderInfo = NULL; 1585 } 1586 1587 // remove buffer even if mOMX->freeBuffer fails 1588 mBuffers[portIndex].removeAt(i); 1589 return err; 1590} 1591 1592ACodec::BufferInfo *ACodec::findBufferByID( 1593 uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) { 1594 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1595 BufferInfo *info = 
&mBuffers[portIndex].editItemAt(i); 1596 1597 if (info->mBufferID == bufferID) { 1598 if (index != NULL) { 1599 *index = i; 1600 } 1601 return info; 1602 } 1603 } 1604 1605 ALOGE("Could not find buffer with ID %u", bufferID); 1606 return NULL; 1607} 1608 1609status_t ACodec::setComponentRole( 1610 bool isEncoder, const char *mime) { 1611 const char *role = getComponentRole(isEncoder, mime); 1612 if (role == NULL) { 1613 return BAD_VALUE; 1614 } 1615 status_t err = setComponentRole(mOMX, mNode, role); 1616 if (err != OK) { 1617 ALOGW("[%s] Failed to set standard component role '%s'.", 1618 mComponentName.c_str(), role); 1619 } 1620 return err; 1621} 1622 1623//static 1624const char *ACodec::getComponentRole( 1625 bool isEncoder, const char *mime) { 1626 struct MimeToRole { 1627 const char *mime; 1628 const char *decoderRole; 1629 const char *encoderRole; 1630 }; 1631 1632 static const MimeToRole kMimeToRole[] = { 1633 { MEDIA_MIMETYPE_AUDIO_MPEG, 1634 "audio_decoder.mp3", "audio_encoder.mp3" }, 1635 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I, 1636 "audio_decoder.mp1", "audio_encoder.mp1" }, 1637 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, 1638 "audio_decoder.mp2", "audio_encoder.mp2" }, 1639 { MEDIA_MIMETYPE_AUDIO_AMR_NB, 1640 "audio_decoder.amrnb", "audio_encoder.amrnb" }, 1641 { MEDIA_MIMETYPE_AUDIO_AMR_WB, 1642 "audio_decoder.amrwb", "audio_encoder.amrwb" }, 1643 { MEDIA_MIMETYPE_AUDIO_AAC, 1644 "audio_decoder.aac", "audio_encoder.aac" }, 1645 { MEDIA_MIMETYPE_AUDIO_VORBIS, 1646 "audio_decoder.vorbis", "audio_encoder.vorbis" }, 1647 { MEDIA_MIMETYPE_AUDIO_OPUS, 1648 "audio_decoder.opus", "audio_encoder.opus" }, 1649 { MEDIA_MIMETYPE_AUDIO_G711_MLAW, 1650 "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" }, 1651 { MEDIA_MIMETYPE_AUDIO_G711_ALAW, 1652 "audio_decoder.g711alaw", "audio_encoder.g711alaw" }, 1653 { MEDIA_MIMETYPE_VIDEO_AVC, 1654 "video_decoder.avc", "video_encoder.avc" }, 1655 { MEDIA_MIMETYPE_VIDEO_HEVC, 1656 "video_decoder.hevc", "video_encoder.hevc" }, 1657 { 
MEDIA_MIMETYPE_VIDEO_MPEG4, 1658 "video_decoder.mpeg4", "video_encoder.mpeg4" }, 1659 { MEDIA_MIMETYPE_VIDEO_H263, 1660 "video_decoder.h263", "video_encoder.h263" }, 1661 { MEDIA_MIMETYPE_VIDEO_VP8, 1662 "video_decoder.vp8", "video_encoder.vp8" }, 1663 { MEDIA_MIMETYPE_VIDEO_VP9, 1664 "video_decoder.vp9", "video_encoder.vp9" }, 1665 { MEDIA_MIMETYPE_AUDIO_RAW, 1666 "audio_decoder.raw", "audio_encoder.raw" }, 1667 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, 1668 "video_decoder.dolby-vision", "video_encoder.dolby-vision" }, 1669 { MEDIA_MIMETYPE_AUDIO_FLAC, 1670 "audio_decoder.flac", "audio_encoder.flac" }, 1671 { MEDIA_MIMETYPE_AUDIO_MSGSM, 1672 "audio_decoder.gsm", "audio_encoder.gsm" }, 1673 { MEDIA_MIMETYPE_VIDEO_MPEG2, 1674 "video_decoder.mpeg2", "video_encoder.mpeg2" }, 1675 { MEDIA_MIMETYPE_AUDIO_AC3, 1676 "audio_decoder.ac3", "audio_encoder.ac3" }, 1677 { MEDIA_MIMETYPE_AUDIO_EAC3, 1678 "audio_decoder.eac3", "audio_encoder.eac3" }, 1679 }; 1680 1681 static const size_t kNumMimeToRole = 1682 sizeof(kMimeToRole) / sizeof(kMimeToRole[0]); 1683 1684 size_t i; 1685 for (i = 0; i < kNumMimeToRole; ++i) { 1686 if (!strcasecmp(mime, kMimeToRole[i].mime)) { 1687 break; 1688 } 1689 } 1690 1691 if (i == kNumMimeToRole) { 1692 return NULL; 1693 } 1694 1695 return isEncoder ? 
kMimeToRole[i].encoderRole 1696 : kMimeToRole[i].decoderRole; 1697} 1698 1699//static 1700status_t ACodec::setComponentRole( 1701 const sp<IOMX> &omx, IOMX::node_id node, const char *role) { 1702 OMX_PARAM_COMPONENTROLETYPE roleParams; 1703 InitOMXParams(&roleParams); 1704 1705 strncpy((char *)roleParams.cRole, 1706 role, OMX_MAX_STRINGNAME_SIZE - 1); 1707 1708 roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0'; 1709 1710 return omx->setParameter( 1711 node, OMX_IndexParamStandardComponentRole, 1712 &roleParams, sizeof(roleParams)); 1713} 1714 1715status_t ACodec::configureCodec( 1716 const char *mime, const sp<AMessage> &msg) { 1717 int32_t encoder; 1718 if (!msg->findInt32("encoder", &encoder)) { 1719 encoder = false; 1720 } 1721 1722 sp<AMessage> inputFormat = new AMessage; 1723 sp<AMessage> outputFormat = new AMessage; 1724 mConfigFormat = msg; 1725 1726 mIsEncoder = encoder; 1727 1728 mInputMetadataType = kMetadataBufferTypeInvalid; 1729 mOutputMetadataType = kMetadataBufferTypeInvalid; 1730 1731 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1732 1733 if (err != OK) { 1734 return err; 1735 } 1736 1737 int32_t bitRate = 0; 1738 // FLAC encoder doesn't need a bitrate, other encoders do 1739 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1740 && !msg->findInt32("bitrate", &bitRate)) { 1741 return INVALID_OPERATION; 1742 } 1743 1744 // propagate bitrate to the output so that the muxer has it 1745 if (encoder && msg->findInt32("bitrate", &bitRate)) { 1746 // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the 1747 // average bitrate. We've been setting both bitrate and max-bitrate to this same value. 
1748 outputFormat->setInt32("bitrate", bitRate); 1749 outputFormat->setInt32("max-bitrate", bitRate); 1750 } 1751 1752 int32_t storeMeta; 1753 if (encoder 1754 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1755 && storeMeta != kMetadataBufferTypeInvalid) { 1756 mInputMetadataType = (MetadataBufferType)storeMeta; 1757 err = mOMX->storeMetaDataInBuffers( 1758 mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1759 if (err != OK) { 1760 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1761 mComponentName.c_str(), err); 1762 1763 return err; 1764 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1765 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1766 // IOMX translates ANWBuffers to gralloc source already. 1767 mInputMetadataType = (MetadataBufferType)storeMeta; 1768 } 1769 1770 uint32_t usageBits; 1771 if (mOMX->getParameter( 1772 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1773 &usageBits, sizeof(usageBits)) == OK) { 1774 inputFormat->setInt32( 1775 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1776 } 1777 } 1778 1779 int32_t prependSPSPPS = 0; 1780 if (encoder 1781 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1782 && prependSPSPPS != 0) { 1783 OMX_INDEXTYPE index; 1784 err = mOMX->getExtensionIndex( 1785 mNode, 1786 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1787 &index); 1788 1789 if (err == OK) { 1790 PrependSPSPPSToIDRFramesParams params; 1791 InitOMXParams(¶ms); 1792 params.bEnable = OMX_TRUE; 1793 1794 err = mOMX->setParameter( 1795 mNode, index, ¶ms, sizeof(params)); 1796 } 1797 1798 if (err != OK) { 1799 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1800 "IDR frames. 
(err %d)", err); 1801 1802 return err; 1803 } 1804 } 1805 1806 // Only enable metadata mode on encoder output if encoder can prepend 1807 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1808 // opaque handle, to which we don't have access. 1809 int32_t video = !strncasecmp(mime, "video/", 6); 1810 mIsVideo = video; 1811 if (encoder && video) { 1812 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1813 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1814 && storeMeta != 0); 1815 1816 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1817 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1818 if (err != OK) { 1819 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1820 mComponentName.c_str(), err); 1821 } 1822 1823 if (!msg->findInt64( 1824 "repeat-previous-frame-after", 1825 &mRepeatFrameDelayUs)) { 1826 mRepeatFrameDelayUs = -1ll; 1827 } 1828 1829 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1830 mMaxPtsGapUs = -1ll; 1831 } 1832 1833 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1834 mMaxFps = -1; 1835 } 1836 1837 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1838 mTimePerCaptureUs = -1ll; 1839 } 1840 1841 if (!msg->findInt32( 1842 "create-input-buffers-suspended", 1843 (int32_t*)&mCreateInputBuffersSuspended)) { 1844 mCreateInputBuffersSuspended = false; 1845 } 1846 } 1847 1848 // NOTE: we only use native window for video decoders 1849 sp<RefBase> obj; 1850 bool haveNativeWindow = msg->findObject("native-window", &obj) 1851 && obj != NULL && video && !encoder; 1852 mUsingNativeWindow = haveNativeWindow; 1853 mLegacyAdaptiveExperiment = false; 1854 if (video && !encoder) { 1855 inputFormat->setInt32("adaptive-playback", false); 1856 1857 int32_t usageProtected; 1858 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1859 if (!haveNativeWindow) { 1860 ALOGE("protected output buffers must be sent to an 
ANativeWindow"); 1861 return PERMISSION_DENIED; 1862 } 1863 mFlags |= kFlagIsGrallocUsageProtected; 1864 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1865 } 1866 1867 if (mFlags & kFlagIsSecure) { 1868 // use native_handles for secure input buffers 1869 err = mOMX->enableNativeBuffers( 1870 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1871 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1872 err = OK; // ignore error for now 1873 } 1874 } 1875 if (haveNativeWindow) { 1876 sp<ANativeWindow> nativeWindow = 1877 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1878 1879 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1880 int32_t autoFrc; 1881 if (msg->findInt32("auto-frc", &autoFrc)) { 1882 bool enabled = autoFrc; 1883 OMX_CONFIG_BOOLEANTYPE config; 1884 InitOMXParams(&config); 1885 config.bEnabled = (OMX_BOOL)enabled; 1886 status_t temp = mOMX->setConfig( 1887 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1888 &config, sizeof(config)); 1889 if (temp == OK) { 1890 outputFormat->setInt32("auto-frc", enabled); 1891 } else if (enabled) { 1892 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1893 } 1894 } 1895 // END of temporary support for automatic FRC 1896 1897 int32_t tunneled; 1898 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1899 tunneled != 0) { 1900 ALOGI("Configuring TUNNELED video playback."); 1901 mTunneled = true; 1902 1903 int32_t audioHwSync = 0; 1904 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1905 ALOGW("No Audio HW Sync provided for video tunnel"); 1906 } 1907 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1908 if (err != OK) { 1909 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1910 audioHwSync, nativeWindow.get()); 1911 return err; 1912 } 1913 1914 int32_t maxWidth = 0, maxHeight = 0; 1915 if (msg->findInt32("max-width", &maxWidth) && 1916 msg->findInt32("max-height", &maxHeight)) { 1917 
1918 err = mOMX->prepareForAdaptivePlayback( 1919 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1920 if (err != OK) { 1921 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1922 mComponentName.c_str(), err); 1923 // allow failure 1924 err = OK; 1925 } else { 1926 inputFormat->setInt32("max-width", maxWidth); 1927 inputFormat->setInt32("max-height", maxHeight); 1928 inputFormat->setInt32("adaptive-playback", true); 1929 } 1930 } 1931 } else { 1932 ALOGV("Configuring CPU controlled video playback."); 1933 mTunneled = false; 1934 1935 // Explicity reset the sideband handle of the window for 1936 // non-tunneled video in case the window was previously used 1937 // for a tunneled video playback. 1938 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1939 if (err != OK) { 1940 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1941 return err; 1942 } 1943 1944 // Always try to enable dynamic output buffers on native surface 1945 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1946 err = mOMX->storeMetaDataInBuffers( 1947 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1948 if (err != OK) { 1949 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1950 mComponentName.c_str(), err); 1951 1952 // if adaptive playback has been requested, try JB fallback 1953 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1954 // LARGE MEMORY REQUIREMENT 1955 1956 // we will not do adaptive playback on software accessed 1957 // surfaces as they never had to respond to changes in the 1958 // crop window, and we don't trust that they will be able to. 
1959 int usageBits = 0; 1960 bool canDoAdaptivePlayback; 1961 1962 if (nativeWindow->query( 1963 nativeWindow.get(), 1964 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1965 &usageBits) != OK) { 1966 canDoAdaptivePlayback = false; 1967 } else { 1968 canDoAdaptivePlayback = 1969 (usageBits & 1970 (GRALLOC_USAGE_SW_READ_MASK | 1971 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1972 } 1973 1974 int32_t maxWidth = 0, maxHeight = 0; 1975 if (canDoAdaptivePlayback && 1976 msg->findInt32("max-width", &maxWidth) && 1977 msg->findInt32("max-height", &maxHeight)) { 1978 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1979 mComponentName.c_str(), maxWidth, maxHeight); 1980 1981 err = mOMX->prepareForAdaptivePlayback( 1982 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1983 maxHeight); 1984 ALOGW_IF(err != OK, 1985 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1986 mComponentName.c_str(), err); 1987 1988 if (err == OK) { 1989 inputFormat->setInt32("max-width", maxWidth); 1990 inputFormat->setInt32("max-height", maxHeight); 1991 inputFormat->setInt32("adaptive-playback", true); 1992 } 1993 } 1994 // allow failure 1995 err = OK; 1996 } else { 1997 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1998 mComponentName.c_str()); 1999 CHECK(storingMetadataInDecodedBuffers()); 2000 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 2001 "legacy-adaptive", !msg->contains("no-experiments")); 2002 2003 inputFormat->setInt32("adaptive-playback", true); 2004 } 2005 2006 int32_t push; 2007 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 2008 && push != 0) { 2009 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 2010 } 2011 } 2012 2013 int32_t rotationDegrees; 2014 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 2015 mRotationDegrees = rotationDegrees; 2016 } else { 2017 mRotationDegrees = 0; 2018 } 2019 } 2020 2021 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 2022 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 2023 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 2024 2025 if (video) { 2026 // determine need for software renderer 2027 bool usingSwRenderer = false; 2028 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 2029 usingSwRenderer = true; 2030 haveNativeWindow = false; 2031 } 2032 2033 if (encoder) { 2034 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2035 } else { 2036 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2037 } 2038 2039 if (err != OK) { 2040 return err; 2041 } 2042 2043 if (haveNativeWindow) { 2044 mNativeWindow = static_cast<Surface *>(obj.get()); 2045 } 2046 2047 // initialize native window now to get actual output format 2048 // TODO: this is needed for some encoders even though they don't use native window 2049 err = initNativeWindow(); 2050 if (err != OK) { 2051 return err; 2052 } 2053 2054 // fallback for devices that do not handle flex-YUV for native buffers 2055 if (haveNativeWindow) { 2056 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2057 if (msg->findInt32("color-format", &requestedColorFormat) && 2058 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2059 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2060 if (err != OK) { 2061 return err; 2062 } 2063 int32_t colorFormat = OMX_COLOR_FormatUnused; 2064 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2065 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2066 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2067 return BAD_VALUE; 2068 } 2069 ALOGD("[%s] Requested output format %#x and got %#x.", 2070 mComponentName.c_str(), requestedColorFormat, colorFormat); 2071 if (!isFlexibleColorFormat( 2072 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2073 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2074 // device did not handle flex-YUV request for native window, fall back 2075 // to SW renderer 2076 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2077 mNativeWindow.clear(); 2078 mNativeWindowUsageBits = 0; 2079 haveNativeWindow = false; 2080 usingSwRenderer = true; 2081 if (storingMetadataInDecodedBuffers()) { 2082 err = mOMX->storeMetaDataInBuffers( 2083 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2084 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2085 // TODO: implement adaptive-playback support for bytebuffer mode. 2086 // This is done by SW codecs, but most HW codecs don't support it. 2087 inputFormat->setInt32("adaptive-playback", false); 2088 } 2089 if (err == OK) { 2090 err = mOMX->enableNativeBuffers( 2091 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2092 } 2093 if (mFlags & kFlagIsGrallocUsageProtected) { 2094 // fallback is not supported for protected playback 2095 err = PERMISSION_DENIED; 2096 } else if (err == OK) { 2097 err = setupVideoDecoder( 2098 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2099 } 2100 } 2101 } 2102 } 2103 2104 if (usingSwRenderer) { 2105 outputFormat->setInt32("using-sw-renderer", 1); 2106 } 2107 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2108 int32_t numChannels, sampleRate; 2109 if (!msg->findInt32("channel-count", &numChannels) 2110 || !msg->findInt32("sample-rate", &sampleRate)) { 2111 // Since we did not always check for these, leave them optional 2112 // and have the decoder figure it all out. 2113 err = OK; 2114 } else { 2115 err = setupRawAudioFormat( 2116 encoder ? 
kPortIndexInput : kPortIndexOutput, 2117 sampleRate, 2118 numChannels); 2119 } 2120 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2121 int32_t numChannels, sampleRate; 2122 if (!msg->findInt32("channel-count", &numChannels) 2123 || !msg->findInt32("sample-rate", &sampleRate)) { 2124 err = INVALID_OPERATION; 2125 } else { 2126 int32_t isADTS, aacProfile; 2127 int32_t sbrMode; 2128 int32_t maxOutputChannelCount; 2129 int32_t pcmLimiterEnable; 2130 drcParams_t drc; 2131 if (!msg->findInt32("is-adts", &isADTS)) { 2132 isADTS = 0; 2133 } 2134 if (!msg->findInt32("aac-profile", &aacProfile)) { 2135 aacProfile = OMX_AUDIO_AACObjectNull; 2136 } 2137 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2138 sbrMode = -1; 2139 } 2140 2141 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2142 maxOutputChannelCount = -1; 2143 } 2144 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2145 // value is unknown 2146 pcmLimiterEnable = -1; 2147 } 2148 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2149 // value is unknown 2150 drc.encodedTargetLevel = -1; 2151 } 2152 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2153 // value is unknown 2154 drc.drcCut = -1; 2155 } 2156 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2157 // value is unknown 2158 drc.drcBoost = -1; 2159 } 2160 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2161 // value is unknown 2162 drc.heavyCompression = -1; 2163 } 2164 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2165 // value is unknown 2166 drc.targetRefLevel = -1; 2167 } 2168 2169 err = setupAACCodec( 2170 encoder, numChannels, sampleRate, bitRate, aacProfile, 2171 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2172 pcmLimiterEnable); 2173 } 2174 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2175 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2176 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2177 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2178 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2179 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2180 // These are PCM-like formats with a fixed sample rate but 2181 // a variable number of channels. 2182 2183 int32_t numChannels; 2184 if (!msg->findInt32("channel-count", &numChannels)) { 2185 err = INVALID_OPERATION; 2186 } else { 2187 int32_t sampleRate; 2188 if (!msg->findInt32("sample-rate", &sampleRate)) { 2189 sampleRate = 8000; 2190 } 2191 err = setupG711Codec(encoder, sampleRate, numChannels); 2192 } 2193 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2194 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2195 if (encoder && 2196 (!msg->findInt32("channel-count", &numChannels) 2197 || !msg->findInt32("sample-rate", &sampleRate))) { 2198 ALOGE("missing channel count or sample rate for FLAC encoder"); 2199 err = INVALID_OPERATION; 2200 } else { 2201 if (encoder) { 2202 if (!msg->findInt32( 2203 "complexity", &compressionLevel) && 2204 !msg->findInt32( 2205 "flac-compression-level", &compressionLevel)) { 2206 compressionLevel = 5; // default FLAC compression level 2207 } else if (compressionLevel < 0) { 2208 ALOGW("compression level %d outside [0..8] range, " 2209 "using 0", 2210 compressionLevel); 2211 compressionLevel = 0; 2212 } else if (compressionLevel > 8) { 2213 ALOGW("compression level %d outside [0..8] range, " 2214 "using 8", 2215 compressionLevel); 2216 compressionLevel = 8; 2217 } 2218 } 2219 err = setupFlacCodec( 2220 encoder, numChannels, sampleRate, compressionLevel); 2221 } 2222 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2223 int32_t numChannels, sampleRate; 2224 if (encoder 2225 || !msg->findInt32("channel-count", &numChannels) 2226 || !msg->findInt32("sample-rate", &sampleRate)) { 2227 err = INVALID_OPERATION; 2228 } else { 2229 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2230 } 2231 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2232 int32_t numChannels; 2233 int32_t sampleRate; 2234 if (!msg->findInt32("channel-count", &numChannels) 2235 || !msg->findInt32("sample-rate", &sampleRate)) { 2236 err = INVALID_OPERATION; 2237 } else { 2238 err = setupAC3Codec(encoder, numChannels, sampleRate); 2239 } 2240 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2241 int32_t numChannels; 2242 int32_t sampleRate; 2243 if (!msg->findInt32("channel-count", &numChannels) 2244 || !msg->findInt32("sample-rate", &sampleRate)) { 2245 err = INVALID_OPERATION; 2246 } else { 2247 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2248 } 2249 } 2250 2251 if (err != OK) { 2252 return err; 2253 } 2254 2255 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2256 mEncoderDelay = 0; 2257 } 2258 2259 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2260 mEncoderPadding = 0; 2261 } 2262 2263 if (msg->findInt32("channel-mask", &mChannelMask)) { 2264 mChannelMaskPresent = true; 2265 } else { 2266 mChannelMaskPresent = false; 2267 } 2268 2269 int32_t maxInputSize; 2270 if (msg->findInt32("max-input-size", &maxInputSize)) { 2271 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2272 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2273 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2274 } 2275 2276 int32_t priority; 2277 if (msg->findInt32("priority", &priority)) { 2278 err = setPriority(priority); 2279 } 2280 2281 int32_t rateInt = -1; 2282 float rateFloat = -1; 2283 if (!msg->findFloat("operating-rate", &rateFloat)) { 2284 msg->findInt32("operating-rate", &rateInt); 2285 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
    }
    // Only a positive rate is forwarded; otherwise the codec default stands.
    if (rateFloat > 0) {
        err = setOperatingRate(rateFloat, video);
    }

    // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
    mBaseOutputFormat = outputFormat;
    // trigger a kWhatOutputFormatChanged msg on first buffer
    mLastOutputFormat.clear();

    err = getPortFormat(kPortIndexInput, inputFormat);
    if (err == OK) {
        err = getPortFormat(kPortIndexOutput, outputFormat);
        if (err == OK) {
            mInputFormat = inputFormat;
            mOutputFormat = outputFormat;
        }
    }

    // create data converters if needed
    if (!video && err == OK) {
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        if (encoder) {
            // encoder: convert the caller's PCM encoding into whatever the
            // codec accepts on its input port
            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
            if (mConverter[kPortIndexInput] != NULL) {
                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        } else {
            // decoder: convert the codec's output encoding back into what the
            // caller requested
            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
            if (mConverter[kPortIndexOutput] != NULL) {
                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        }
    }

    return err;
}

// Forwards a non-negative priority hint to the component via
// OMX_IndexConfigPriority.  Lack of support is only logged -- the codec
// keeps its default priority and OK is still returned.
status_t ACodec::setPriority(int32_t priority) {
    if (priority < 0) {
        return BAD_VALUE;
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = (OMX_U32)priority;
    status_t temp = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
            &config, sizeof(config));
    if (temp != OK) {
        ALOGI("codec does not support config priority (err %d)", temp);
    }
    return OK;
}

// Forwards the requested operating rate to the component via
// OMX_IndexConfigOperatingRate.  For video the value is frames-per-second
// encoded as Q16 fixed point (hence the 65535 upper bound); for audio it
// is the sample rate in Hz.  As with setPriority(), an unsupported config
// is logged but not treated as an error.
status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
    if (rateFloat < 0) {
        return BAD_VALUE;
    }
    OMX_U32 rate;
    if (isVideo) {
        if (rateFloat > 65535) {
            return BAD_VALUE;
        }
        // round-to-nearest Q16 conversion
        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
    } else {
        if (rateFloat > UINT_MAX) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat);
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = rate;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
            &config, sizeof(config));
    if (err != OK) {
        ALOGI("codec does not support config operating rate (err %d)", err);
    }
    return OK;
}

// Queries the encoder's current intra-refresh period (in frames).  Tries
// the Android-specific config index first; on failure falls back to the
// standard OMX cyclic intra-refresh parameter and derives the period from
// the refreshed-macroblock count and the output frame size.
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        // neither index supported (or refresh disabled): report 0, not failure
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);

    return OK;
}

// Sets the intra-refresh period (in frames; 0 disables intra refresh).
// Prefers the Android-specific config index; if that is unsupported and we
// are still in the configure state, falls back to the standard OMX cyclic
// intra-refresh parameter, converting the period into a macroblock count
// derived from the output frame size (16x16 macroblocks).
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
    if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}

// Applies a temporal-layering ("ts-schema") request for video encoders.
// Recognizes "webrtc.vp8.N-layer" and "android.generic.N[+M]" schemas,
// clamps the layer counts to the component's advertised maxima, applies
// them via setConfig (dynamic) or setParameter (during configure), and
// records the schema actually configured into |outputFormat|.
status_t ACodec::configureTemporalLayers(
        const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) {
    if (!mIsVideo || !mIsEncoder) {
        return INVALID_OPERATION;
    }

    AString tsSchema;
    if (!msg->findString("ts-schema", &tsSchema)) {
        // no layering requested -- nothing to do
        return OK;
    }

    unsigned int numLayers = 0;
    unsigned int numBLayers = 0;
    int tags;
    char dummy;
    OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern =
        OMX_VIDEO_AndroidTemporalLayeringPatternNone;
    if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
            && numLayers > 0) {
        pattern =
            OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
    } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
            &numLayers, &dummy, &numBLayers, &dummy))
            && (tags == 1 || (tags == 3 && dummy == '+'))
            && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
        numLayers += numBLayers;
        pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
    } else {
        ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
        return BAD_VALUE;
    }

    OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams;
    InitOMXParams(&layerParams);
    layerParams.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
            &layerParams, sizeof(layerParams));

    if (err != OK) {
        return err;
    } else if (!(layerParams.eSupportedPatterns & pattern)) {
        return BAD_VALUE;
    }

    // clamp the request to what the component advertises
    numLayers = min(numLayers, layerParams.nLayerCountMax);
    numBLayers = min(numBLayers, layerParams.nBLayerCountMax);

    if (!inConfigure) {
        // already running: only setConfig is permitted
        OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig;
        InitOMXParams(&layerConfig);
        layerConfig.nPortIndex = kPortIndexOutput;
        layerConfig.ePattern = pattern;
        layerConfig.nPLayerCountActual = numLayers - numBLayers;
        layerConfig.nBLayerCountActual = numBLayers;
        layerConfig.bBitrateRatiosSpecified = OMX_FALSE;

        err = mOMX->setConfig(
                mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering,
                &layerConfig, sizeof(layerConfig));
    } else {
        layerParams.ePattern = pattern;
        layerParams.nPLayerCountActual = numLayers - numBLayers;
        layerParams.nBLayerCountActual = numBLayers;
        layerParams.bBitrateRatiosSpecified = OMX_FALSE;

        err = mOMX->setParameter(
                mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
                &layerParams, sizeof(layerParams));
    }

    AString configSchema;
    if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) {
        configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers);
    } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) {
        configSchema = AStringPrintf("webrtc.vp8.%u", numLayers);
    }

    if (err != OK) {
        ALOGW("Failed to set temporal layers to %s (requested %s)",
                configSchema.c_str(), tsSchema.c_str());
        return err;
    }

    // read back what the component actually accepted
    err = mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
            &layerParams, sizeof(layerParams));

    if (err == OK) {
        ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)",
                tsSchema.c_str(), configSchema.c_str(),
                asString(layerParams.ePattern), layerParams.ePattern,
                layerParams.nPLayerCountActual, layerParams.nBLayerCountActual);

        if (outputFormat.get() == mOutputFormat.get()) {
            mOutputFormat = mOutputFormat->dup(); // trigger an output format change event
        }
        // assume we got what we configured
        outputFormat->setString("ts-schema", configSchema);
    }
    return err;
}

// Ensures the given port delivers buffers of at least |size| bytes: reads
// the port definition, raises nBufferSize if needed, writes it back and
// re-reads to verify the component actually honored the request.
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize >= size) {
        // already large enough
        return OK;
    }

    def.nBufferSize = size;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // read back to confirm the component accepted the new size
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize < size) {
        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
        return FAILED_TRANSACTION;
    }

    return OK;
}

// Enumerates the port's supported audio formats looking for
// |desiredFormat| and selects it.  Gives up with ERROR_UNSUPPORTED after
// kMaxIndicesToCheck entries to bound the loop on misbehaving components.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eEncoding), format.eEncoding);
            return ERROR_UNSUPPORTED;
        }
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}

// Configures the component for AAC encode or decode.  |sbrMode| of -1 lets
// the codec choose; -1 in the DRC/presentation fields means "unknown, keep
// the codec default".  ADTS framing is only meaningful when decoding, so
// encoder && isADTS is rejected up front.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        return -EINVAL;
    }

    // raw PCM on the encoder's input / decoder's output port
    status_t err = setupRawAudioFormat(
            encoder ?
kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        // ---- AAC encoder path ----
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // SSBR = single-rate SBR, DSBR = dual-rate SBR
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // ---- AAC decoder path ----
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // Android-specific presentation/DRC settings; -1 fields leave the codec
    // default in place.
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    mSampleRate = sampleRate;
    return res;
}

// Configures the component for AC3 decode (encode is rejected): raw PCM on
// the output port, then channel count and sample rate via the Android AC3
// parameter on the input port.
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ?
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("AC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));
}

// Configures the component for E-AC3 (Dolby Digital Plus) decode; mirrors
// setupAC3Codec() using the Android E-AC3 parameter.  Encoding is not
// supported.
status_t ACodec::setupEAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("EAC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));
}

// Maps a target bitrate (bits/sec) to the lowest AMR band mode whose rate
// is at or above it, for narrowband (NB0-NB7) and wideband (WB0-WB8) AMR.
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
        bool isAMRWB, int32_t bps) {
    if (isAMRWB) {
        if (bps <= 6600) {
            return OMX_AUDIO_AMRBandModeWB0;
        } else if (bps <= 8850) {
            return OMX_AUDIO_AMRBandModeWB1;
        } else if (bps <= 12650) {
            return OMX_AUDIO_AMRBandModeWB2;
        } else if (bps <= 14250) {
            return OMX_AUDIO_AMRBandModeWB3;
        } else if (bps <= 15850) {
            return OMX_AUDIO_AMRBandModeWB4;
        } else if (bps <= 18250) {
            return OMX_AUDIO_AMRBandModeWB5;
        } else if (bps <= 19850) {
            return OMX_AUDIO_AMRBandModeWB6;
        } else if (bps <= 23050) {
            return OMX_AUDIO_AMRBandModeWB7;
        }

        // 23850 bps
        return OMX_AUDIO_AMRBandModeWB8;
    } else { // AMRNB
        if (bps <= 4750) {
            return OMX_AUDIO_AMRBandModeNB0;
        } else if (bps <= 5150) {
            return OMX_AUDIO_AMRBandModeNB1;
        } else if (bps <= 5900) {
            return OMX_AUDIO_AMRBandModeNB2;
        } else if (bps <= 6700) {
            return OMX_AUDIO_AMRBandModeNB3;
        } else if (bps <= 7400) {
            return OMX_AUDIO_AMRBandModeNB4;
        } else if (bps <= 7950) {
            return OMX_AUDIO_AMRBandModeNB5;
        } else if (bps <= 10200) {
            return OMX_AUDIO_AMRBandModeNB6;
        }

        // 12200 bps
        return OMX_AUDIO_AMRBandModeNB7;
    }
}

// Configures AMR (narrowband or wideband) with FSF framing and the band
// mode nearest |bitrate|, then sets up raw PCM mono on the opposite port
// (8 kHz for NB, 16 kHz for WB).
status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
    OMX_AUDIO_PARAM_AMRTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;

    status_t err =
        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            isWAMR ?
16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// Configures G.711 (A-law / mu-law) decode: caller-supplied channel count
// and sample rate handled as raw PCM on the input port.  Encoding is not
// supported.
status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
    if (encoder) {
        return INVALID_OPERATION;
    }

    return setupRawAudioFormat(
            kPortIndexInput, sampleRate, numChannels);
}

// Configures FLAC encode/decode.  For encoders the compression level
// (already clamped to [0..8] by the caller) is applied via
// OMX_IndexParamAudioFlac; both directions then use raw PCM on the
// opposite port.
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures a port for raw PCM with the given sample rate, channel count
// and encoding (8-bit unsigned, 16-bit signed, or 32-bit float).  If the
// component rejects a non-16-bit encoding, retries with 16-bit signed.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // NOTE(review): returns OMX_ErrorNone (numerically 0 == OK) instead of
        // a status_t error when the channel count has no OMX mapping -- looks
        // like a deliberate best-effort bail-out, but the mixed status types
        // are worth confirming.
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err !=
OK && encoding != kAudioEncodingPcm16bit) { 3039 pcmParams.eNumData = OMX_NumericalDataSigned; 3040 pcmParams.nBitPerSample = 16; 3041 err = mOMX->setParameter( 3042 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 3043 } 3044 return err; 3045} 3046 3047status_t ACodec::configureTunneledVideoPlayback( 3048 int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) { 3049 native_handle_t* sidebandHandle; 3050 3051 status_t err = mOMX->configureVideoTunnelMode( 3052 mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle); 3053 if (err != OK) { 3054 ALOGE("configureVideoTunnelMode failed! (err %d).", err); 3055 return err; 3056 } 3057 3058 err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle); 3059 if (err != OK) { 3060 ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).", 3061 sidebandHandle, err); 3062 return err; 3063 } 3064 3065 return OK; 3066} 3067 3068status_t ACodec::setVideoPortFormatType( 3069 OMX_U32 portIndex, 3070 OMX_VIDEO_CODINGTYPE compressionFormat, 3071 OMX_COLOR_FORMATTYPE colorFormat, 3072 bool usingNativeBuffers) { 3073 OMX_VIDEO_PARAM_PORTFORMATTYPE format; 3074 InitOMXParams(&format); 3075 format.nPortIndex = portIndex; 3076 format.nIndex = 0; 3077 bool found = false; 3078 3079 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 3080 format.nIndex = index; 3081 status_t err = mOMX->getParameter( 3082 mNode, OMX_IndexParamVideoPortFormat, 3083 &format, sizeof(format)); 3084 3085 if (err != OK) { 3086 return err; 3087 } 3088 3089 // substitute back flexible color format to codec supported format 3090 OMX_U32 flexibleEquivalent; 3091 if (compressionFormat == OMX_VIDEO_CodingUnused 3092 && isFlexibleColorFormat( 3093 mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent) 3094 && colorFormat == flexibleEquivalent) { 3095 ALOGI("[%s] using color format %#x in place of %#x", 3096 mComponentName.c_str(), format.eColorFormat, colorFormat); 3097 colorFormat = 
format.eColorFormat; 3098 } 3099 3100 // The following assertion is violated by TI's video decoder. 3101 // CHECK_EQ(format.nIndex, index); 3102 3103 if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) { 3104 if (portIndex == kPortIndexInput 3105 && colorFormat == format.eColorFormat) { 3106 // eCompressionFormat does not seem right. 3107 found = true; 3108 break; 3109 } 3110 if (portIndex == kPortIndexOutput 3111 && compressionFormat == format.eCompressionFormat) { 3112 // eColorFormat does not seem right. 3113 found = true; 3114 break; 3115 } 3116 } 3117 3118 if (format.eCompressionFormat == compressionFormat 3119 && format.eColorFormat == colorFormat) { 3120 found = true; 3121 break; 3122 } 3123 3124 if (index == kMaxIndicesToCheck) { 3125 ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)", 3126 mComponentName.c_str(), index, 3127 asString(format.eCompressionFormat), format.eCompressionFormat, 3128 asString(format.eColorFormat), format.eColorFormat); 3129 } 3130 } 3131 3132 if (!found) { 3133 return UNKNOWN_ERROR; 3134 } 3135 3136 status_t err = mOMX->setParameter( 3137 mNode, OMX_IndexParamVideoPortFormat, 3138 &format, sizeof(format)); 3139 3140 return err; 3141} 3142 3143// Set optimal output format. OMX component lists output formats in the order 3144// of preference, but this got more complicated since the introduction of flexible 3145// YUV formats. We support a legacy behavior for applications that do not use 3146// surface output, do not specify an output format, but expect a "usable" standard 3147// OMX format. SW readable and standard formats must be flex-YUV. 
3148// 3149// Suggested preference order: 3150// - optimal format for texture rendering (mediaplayer behavior) 3151// - optimal SW readable & texture renderable format (flex-YUV support) 3152// - optimal SW readable non-renderable format (flex-YUV bytebuffer support) 3153// - legacy "usable" standard formats 3154// 3155// For legacy support, we prefer a standard format, but will settle for a SW readable 3156// flex-YUV format. 3157status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) { 3158 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat; 3159 InitOMXParams(&format); 3160 format.nPortIndex = kPortIndexOutput; 3161 3162 InitOMXParams(&legacyFormat); 3163 // this field will change when we find a suitable legacy format 3164 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused; 3165 3166 for (OMX_U32 index = 0; ; ++index) { 3167 format.nIndex = index; 3168 status_t err = mOMX->getParameter( 3169 mNode, OMX_IndexParamVideoPortFormat, 3170 &format, sizeof(format)); 3171 if (err != OK) { 3172 // no more formats, pick legacy format if found 3173 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) { 3174 memcpy(&format, &legacyFormat, sizeof(format)); 3175 break; 3176 } 3177 return err; 3178 } 3179 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) { 3180 return OMX_ErrorBadParameter; 3181 } 3182 if (!getLegacyFlexibleFormat) { 3183 break; 3184 } 3185 // standard formats that were exposed to users before 3186 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar 3187 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar 3188 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar 3189 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar 3190 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) { 3191 break; 3192 } 3193 // find best legacy non-standard format 3194 OMX_U32 flexibleEquivalent; 3195 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused 3196 && isFlexibleColorFormat( 3197 mOMX, mNode, 
format.eColorFormat, false /* usingNativeBuffers */, 3198 &flexibleEquivalent) 3199 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 3200 memcpy(&legacyFormat, &format, sizeof(format)); 3201 } 3202 } 3203 return mOMX->setParameter( 3204 mNode, OMX_IndexParamVideoPortFormat, 3205 &format, sizeof(format)); 3206} 3207 3208static const struct VideoCodingMapEntry { 3209 const char *mMime; 3210 OMX_VIDEO_CODINGTYPE mVideoCodingType; 3211} kVideoCodingMapEntry[] = { 3212 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 3213 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 3214 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 3215 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 3216 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 3217 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 3218 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 3219 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 3220}; 3221 3222static status_t GetVideoCodingTypeFromMime( 3223 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 3224 for (size_t i = 0; 3225 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3226 ++i) { 3227 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3228 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3229 return OK; 3230 } 3231 } 3232 3233 *codingType = OMX_VIDEO_CodingUnused; 3234 3235 return ERROR_UNSUPPORTED; 3236} 3237 3238static status_t GetMimeTypeForVideoCoding( 3239 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3240 for (size_t i = 0; 3241 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3242 ++i) { 3243 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3244 *mime = kVideoCodingMapEntry[i].mMime; 3245 return OK; 3246 } 3247 } 3248 3249 mime->clear(); 3250 3251 return ERROR_UNSUPPORTED; 3252} 3253 3254status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) { 3255 OMX_PARAM_PORTDEFINITIONTYPE def; 3256 InitOMXParams(&def); 3257 
def.nPortIndex = portIndex; 3258 status_t err; 3259 ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(), 3260 portIndex == kPortIndexInput ? "input" : "output", bufferNum); 3261 err = mOMX->getParameter( 3262 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3263 if (err != OK) { 3264 return err; 3265 } 3266 def.nBufferCountActual = bufferNum; 3267 err = mOMX->setParameter( 3268 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3269 if (err != OK) { 3270 // Component could reject this request. 3271 ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(), 3272 portIndex == kPortIndexInput ? "input" : "output", bufferNum); 3273 } 3274 return OK; 3275} 3276 3277status_t ACodec::setupVideoDecoder( 3278 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow, 3279 bool usingSwRenderer, sp<AMessage> &outputFormat) { 3280 int32_t width, height; 3281 if (!msg->findInt32("width", &width) 3282 || !msg->findInt32("height", &height)) { 3283 return INVALID_OPERATION; 3284 } 3285 3286 OMX_VIDEO_CODINGTYPE compressionFormat; 3287 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3288 3289 if (err != OK) { 3290 return err; 3291 } 3292 3293 if (compressionFormat == OMX_VIDEO_CodingVP9) { 3294 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 3295 InitOMXParams(¶ms); 3296 params.nPortIndex = kPortIndexInput; 3297 // Check if VP9 decoder advertises supported profiles. 
3298 params.nProfileIndex = 0; 3299 status_t err = mOMX->getParameter( 3300 mNode, 3301 OMX_IndexParamVideoProfileLevelQuerySupported, 3302 ¶ms, 3303 sizeof(params)); 3304 mIsLegacyVP9Decoder = err != OK; 3305 } 3306 3307 err = setVideoPortFormatType( 3308 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 3309 3310 if (err != OK) { 3311 return err; 3312 } 3313 3314 int32_t tmp; 3315 if (msg->findInt32("color-format", &tmp)) { 3316 OMX_COLOR_FORMATTYPE colorFormat = 3317 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3318 err = setVideoPortFormatType( 3319 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3320 if (err != OK) { 3321 ALOGW("[%s] does not support color format %d", 3322 mComponentName.c_str(), colorFormat); 3323 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3324 } 3325 } else { 3326 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3327 } 3328 3329 if (err != OK) { 3330 return err; 3331 } 3332 3333 // Set the component input buffer number to be |tmp|. If succeed, 3334 // component will set input port buffer number to be |tmp|. If fail, 3335 // component will keep the same buffer number as before. 3336 if (msg->findInt32("android._num-input-buffers", &tmp)) { 3337 err = setPortBufferNum(kPortIndexInput, tmp); 3338 if (err != OK) 3339 return err; 3340 } 3341 3342 // Set the component output buffer number to be |tmp|. If succeed, 3343 // component will set output port buffer number to be |tmp|. If fail, 3344 // component will keep the same buffer number as before. 
3345 if (msg->findInt32("android._num-output-buffers", &tmp)) { 3346 err = setPortBufferNum(kPortIndexOutput, tmp); 3347 if (err != OK) 3348 return err; 3349 } 3350 3351 int32_t frameRateInt; 3352 float frameRateFloat; 3353 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 3354 if (!msg->findInt32("frame-rate", &frameRateInt)) { 3355 frameRateInt = -1; 3356 } 3357 frameRateFloat = (float)frameRateInt; 3358 } 3359 3360 err = setVideoFormatOnPort( 3361 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 3362 3363 if (err != OK) { 3364 return err; 3365 } 3366 3367 err = setVideoFormatOnPort( 3368 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 3369 3370 if (err != OK) { 3371 return err; 3372 } 3373 3374 err = setColorAspectsForVideoDecoder( 3375 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat); 3376 if (err == ERROR_UNSUPPORTED) { // support is optional 3377 err = OK; 3378 } 3379 3380 if (err != OK) { 3381 return err; 3382 } 3383 3384 err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat); 3385 if (err == ERROR_UNSUPPORTED) { // support is optional 3386 err = OK; 3387 } 3388 return err; 3389} 3390 3391status_t ACodec::initDescribeColorAspectsIndex() { 3392 status_t err = mOMX->getExtensionIndex( 3393 mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex); 3394 if (err != OK) { 3395 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0; 3396 } 3397 return err; 3398} 3399 3400status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) { 3401 status_t err = ERROR_UNSUPPORTED; 3402 if (mDescribeColorAspectsIndex) { 3403 err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3404 } 3405 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3406 mComponentName.c_str(), 3407 params.sAspects.mRange, asString(params.sAspects.mRange), 3408 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3409 
params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3410 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3411 err, asString(err)); 3412 3413 if (verify && err == OK) { 3414 err = getCodecColorAspects(params); 3415 } 3416 3417 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3418 "[%s] setting color aspects failed even though codec advertises support", 3419 mComponentName.c_str()); 3420 return err; 3421} 3422 3423status_t ACodec::setColorAspectsForVideoDecoder( 3424 int32_t width, int32_t height, bool usingNativeWindow, 3425 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3426 DescribeColorAspectsParams params; 3427 InitOMXParams(¶ms); 3428 params.nPortIndex = kPortIndexOutput; 3429 3430 getColorAspectsFromFormat(configFormat, params.sAspects); 3431 if (usingNativeWindow) { 3432 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3433 // The default aspects will be set back to the output format during the 3434 // getFormat phase of configure(). Set non-Unspecified values back into the 3435 // format, in case component does not support this enumeration. 
3436 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3437 } 3438 3439 (void)initDescribeColorAspectsIndex(); 3440 3441 // communicate color aspects to codec 3442 return setCodecColorAspects(params); 3443} 3444 3445status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams ¶ms) { 3446 status_t err = ERROR_UNSUPPORTED; 3447 if (mDescribeColorAspectsIndex) { 3448 err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3449 } 3450 ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3451 mComponentName.c_str(), 3452 params.sAspects.mRange, asString(params.sAspects.mRange), 3453 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3454 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3455 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3456 err, asString(err)); 3457 if (params.bRequestingDataSpace) { 3458 ALOGV("for dataspace %#x", params.nDataSpace); 3459 } 3460 if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex 3461 && !params.bRequestingDataSpace && !params.bDataSpaceChanged) { 3462 ALOGW("[%s] getting color aspects failed even though codec advertises support", 3463 mComponentName.c_str()); 3464 } 3465 return err; 3466} 3467 3468status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) { 3469 DescribeColorAspectsParams params; 3470 InitOMXParams(¶ms); 3471 params.nPortIndex = kPortIndexInput; 3472 status_t err = getCodecColorAspects(params); 3473 if (err == OK) { 3474 // we only set encoder input aspects if codec supports them 3475 setColorAspectsIntoFormat(params.sAspects, format, true /* force */); 3476 } 3477 return err; 3478} 3479 3480status_t ACodec::getDataSpace( 3481 DescribeColorAspectsParams ¶ms, android_dataspace *dataSpace /* nonnull */, 3482 bool tryCodec) { 3483 status_t err = OK; 3484 if (tryCodec) { 3485 // request dataspace guidance from codec. 
3486 params.bRequestingDataSpace = OMX_TRUE; 3487 err = getCodecColorAspects(params); 3488 params.bRequestingDataSpace = OMX_FALSE; 3489 if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) { 3490 *dataSpace = (android_dataspace)params.nDataSpace; 3491 return err; 3492 } else if (err == ERROR_UNSUPPORTED) { 3493 // ignore not-implemented error for dataspace requests 3494 err = OK; 3495 } 3496 } 3497 3498 // this returns legacy versions if available 3499 *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */); 3500 ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 3501 "and dataspace %#x", 3502 mComponentName.c_str(), 3503 params.sAspects.mRange, asString(params.sAspects.mRange), 3504 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3505 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3506 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3507 *dataSpace); 3508 return err; 3509} 3510 3511 3512status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder( 3513 int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, 3514 android_dataspace *dataSpace) { 3515 DescribeColorAspectsParams params; 3516 InitOMXParams(¶ms); 3517 params.nPortIndex = kPortIndexOutput; 3518 3519 // reset default format and get resulting format 3520 getColorAspectsFromFormat(configFormat, params.sAspects); 3521 if (dataSpace != NULL) { 3522 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3523 } 3524 status_t err = setCodecColorAspects(params, true /* readBack */); 3525 3526 // we always set specified aspects for decoders 3527 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3528 3529 if (dataSpace != NULL) { 3530 status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */); 3531 if (err == OK) { 3532 err = res; 3533 } 3534 } 3535 3536 return err; 3537} 3538 3539// initial video encoder setup for bytebuffer mode 
3540status_t ACodec::setColorAspectsForVideoEncoder( 3541 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3542 // copy config to output format as this is not exposed via getFormat 3543 copyColorConfig(configFormat, outputFormat); 3544 3545 DescribeColorAspectsParams params; 3546 InitOMXParams(¶ms); 3547 params.nPortIndex = kPortIndexInput; 3548 getColorAspectsFromFormat(configFormat, params.sAspects); 3549 3550 (void)initDescribeColorAspectsIndex(); 3551 3552 int32_t usingRecorder; 3553 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3554 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3555 int32_t width, height; 3556 if (configFormat->findInt32("width", &width) 3557 && configFormat->findInt32("height", &height)) { 3558 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3559 status_t err = getDataSpace( 3560 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3561 if (err != OK) { 3562 return err; 3563 } 3564 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3565 } 3566 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3567 } 3568 3569 // communicate color aspects to codec, but do not allow change of the platform aspects 3570 ColorAspects origAspects = params.sAspects; 3571 for (int triesLeft = 2; --triesLeft >= 0; ) { 3572 status_t err = setCodecColorAspects(params, true /* readBack */); 3573 if (err != OK 3574 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3575 params.sAspects, origAspects, true /* usePlatformAspects */)) { 3576 return err; 3577 } 3578 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3579 mComponentName.c_str()); 3580 } 3581 return OK; 3582} 3583 3584status_t ACodec::setHDRStaticInfoForVideoCodec( 3585 OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3586 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3587 3588 
DescribeHDRStaticInfoParams params; 3589 InitOMXParams(¶ms); 3590 params.nPortIndex = portIndex; 3591 3592 HDRStaticInfo *info = ¶ms.sInfo; 3593 if (getHDRStaticInfoFromFormat(configFormat, info)) { 3594 setHDRStaticInfoIntoFormat(params.sInfo, outputFormat); 3595 } 3596 3597 (void)initDescribeHDRStaticInfoIndex(); 3598 3599 // communicate HDR static Info to codec 3600 return setHDRStaticInfo(params); 3601} 3602 3603// subsequent initial video encoder setup for surface mode 3604status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3605 android_dataspace *dataSpace /* nonnull */) { 3606 DescribeColorAspectsParams params; 3607 InitOMXParams(¶ms); 3608 params.nPortIndex = kPortIndexInput; 3609 ColorAspects &aspects = params.sAspects; 3610 3611 // reset default format and store resulting format into both input and output formats 3612 getColorAspectsFromFormat(mConfigFormat, aspects); 3613 int32_t width, height; 3614 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3615 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3616 } 3617 setColorAspectsIntoFormat(aspects, mInputFormat); 3618 setColorAspectsIntoFormat(aspects, mOutputFormat); 3619 3620 // communicate color aspects to codec, but do not allow any change 3621 ColorAspects origAspects = aspects; 3622 status_t err = OK; 3623 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3624 status_t err = setCodecColorAspects(params, true /* readBack */); 3625 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3626 break; 3627 } 3628 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3629 mComponentName.c_str()); 3630 } 3631 3632 *dataSpace = HAL_DATASPACE_BT709; 3633 aspects = origAspects; // restore desired color aspects 3634 status_t res = getDataSpace( 3635 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3636 if (err == 
OK) { 3637 err = res; 3638 } 3639 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace); 3640 mInputFormat->setBuffer( 3641 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects))); 3642 3643 // update input format with codec supported color aspects (basically set unsupported 3644 // aspects to Unspecified) 3645 if (err == OK) { 3646 (void)getInputColorAspectsForVideoEncoder(mInputFormat); 3647 } 3648 3649 ALOGV("set default color aspects, updated input format to %s, output format to %s", 3650 mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str()); 3651 3652 return err; 3653} 3654 3655status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) { 3656 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3657 DescribeHDRStaticInfoParams params; 3658 InitOMXParams(¶ms); 3659 params.nPortIndex = portIndex; 3660 3661 status_t err = getHDRStaticInfo(params); 3662 if (err == OK) { 3663 // we only set decodec output HDRStaticInfo if codec supports them 3664 setHDRStaticInfoIntoFormat(params.sInfo, format); 3665 } 3666 return err; 3667} 3668 3669status_t ACodec::initDescribeHDRStaticInfoIndex() { 3670 status_t err = mOMX->getExtensionIndex( 3671 mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex); 3672 if (err != OK) { 3673 mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0; 3674 } 3675 return err; 3676} 3677 3678status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams ¶ms) { 3679 status_t err = ERROR_UNSUPPORTED; 3680 if (mDescribeHDRStaticInfoIndex) { 3681 err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3682 } 3683 3684 const HDRStaticInfo *info = ¶ms.sInfo; 3685 ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, " 3686 "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)", 3687 mComponentName.c_str(), 3688 info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, 
info->sType1.mG.y, 3689 info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y, 3690 info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance, 3691 info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel); 3692 3693 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3694 "[%s] setting HDRStaticInfo failed even though codec advertises support", 3695 mComponentName.c_str()); 3696 return err; 3697} 3698 3699status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams ¶ms) { 3700 status_t err = ERROR_UNSUPPORTED; 3701 if (mDescribeHDRStaticInfoIndex) { 3702 err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3703 } 3704 3705 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3706 "[%s] getting HDRStaticInfo failed even though codec advertises support", 3707 mComponentName.c_str()); 3708 return err; 3709} 3710 3711status_t ACodec::setupVideoEncoder( 3712 const char *mime, const sp<AMessage> &msg, 3713 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3714 int32_t tmp; 3715 if (!msg->findInt32("color-format", &tmp)) { 3716 return INVALID_OPERATION; 3717 } 3718 3719 OMX_COLOR_FORMATTYPE colorFormat = 3720 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3721 3722 status_t err = setVideoPortFormatType( 3723 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3724 3725 if (err != OK) { 3726 ALOGE("[%s] does not support color format %d", 3727 mComponentName.c_str(), colorFormat); 3728 3729 return err; 3730 } 3731 3732 /* Input port configuration */ 3733 3734 OMX_PARAM_PORTDEFINITIONTYPE def; 3735 InitOMXParams(&def); 3736 3737 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3738 3739 def.nPortIndex = kPortIndexInput; 3740 3741 err = mOMX->getParameter( 3742 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3743 3744 if (err != OK) { 3745 return err; 3746 } 3747 3748 int32_t width, height, bitrate; 3749 if (!msg->findInt32("width", &width) 3750 
|| !msg->findInt32("height", &height) 3751 || !msg->findInt32("bitrate", &bitrate)) { 3752 return INVALID_OPERATION; 3753 } 3754 3755 video_def->nFrameWidth = width; 3756 video_def->nFrameHeight = height; 3757 3758 int32_t stride; 3759 if (!msg->findInt32("stride", &stride)) { 3760 stride = width; 3761 } 3762 3763 video_def->nStride = stride; 3764 3765 int32_t sliceHeight; 3766 if (!msg->findInt32("slice-height", &sliceHeight)) { 3767 sliceHeight = height; 3768 } 3769 3770 video_def->nSliceHeight = sliceHeight; 3771 3772 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3773 3774 float frameRate; 3775 if (!msg->findFloat("frame-rate", &frameRate)) { 3776 int32_t tmp; 3777 if (!msg->findInt32("frame-rate", &tmp)) { 3778 return INVALID_OPERATION; 3779 } 3780 frameRate = (float)tmp; 3781 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3782 } 3783 3784 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3785 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3786 // this is redundant as it was already set up in setVideoPortFormatType 3787 // FIXME for now skip this only for flexible YUV formats 3788 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3789 video_def->eColorFormat = colorFormat; 3790 } 3791 3792 err = mOMX->setParameter( 3793 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3794 3795 if (err != OK) { 3796 ALOGE("[%s] failed to set input port definition parameters.", 3797 mComponentName.c_str()); 3798 3799 return err; 3800 } 3801 3802 /* Output port configuration */ 3803 3804 OMX_VIDEO_CODINGTYPE compressionFormat; 3805 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3806 3807 if (err != OK) { 3808 return err; 3809 } 3810 3811 err = setVideoPortFormatType( 3812 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3813 3814 if (err != OK) { 3815 ALOGE("[%s] does not support compression format %d", 3816 mComponentName.c_str(), compressionFormat); 3817 3818 return err; 3819 } 3820 3821 
def.nPortIndex = kPortIndexOutput; 3822 3823 err = mOMX->getParameter( 3824 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3825 3826 if (err != OK) { 3827 return err; 3828 } 3829 3830 video_def->nFrameWidth = width; 3831 video_def->nFrameHeight = height; 3832 video_def->xFramerate = 0; 3833 video_def->nBitrate = bitrate; 3834 video_def->eCompressionFormat = compressionFormat; 3835 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3836 3837 err = mOMX->setParameter( 3838 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3839 3840 if (err != OK) { 3841 ALOGE("[%s] failed to set output port definition parameters.", 3842 mComponentName.c_str()); 3843 3844 return err; 3845 } 3846 3847 int32_t intraRefreshPeriod = 0; 3848 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3849 && intraRefreshPeriod >= 0) { 3850 err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3851 if (err != OK) { 3852 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 3853 mComponentName.c_str()); 3854 err = OK; 3855 } 3856 } 3857 3858 switch (compressionFormat) { 3859 case OMX_VIDEO_CodingMPEG4: 3860 err = setupMPEG4EncoderParameters(msg); 3861 break; 3862 3863 case OMX_VIDEO_CodingH263: 3864 err = setupH263EncoderParameters(msg); 3865 break; 3866 3867 case OMX_VIDEO_CodingAVC: 3868 err = setupAVCEncoderParameters(msg); 3869 break; 3870 3871 case OMX_VIDEO_CodingHEVC: 3872 err = setupHEVCEncoderParameters(msg); 3873 break; 3874 3875 case OMX_VIDEO_CodingVP8: 3876 case OMX_VIDEO_CodingVP9: 3877 err = setupVPXEncoderParameters(msg); 3878 break; 3879 3880 default: 3881 break; 3882 } 3883 3884 if (err != OK) { 3885 return err; 3886 } 3887 3888 // Set up color aspects on input, but propagate them to the output format, as they will 3889 // not be read back from encoder. 3890 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3891 if (err == ERROR_UNSUPPORTED) { 3892 ALOGI("[%s] cannot encode color aspects. 
Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    // HDR static metadata is likewise optional; only a hard failure aborts configure.
    err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    switch (compressionFormat) {
        case OMX_VIDEO_CodingAVC:
        case OMX_VIDEO_CodingHEVC:
            // Temporal layering failure is tolerated for AVC/HEVC.
            err = configureTemporalLayers(msg, true /* inConfigure */, outputFormat);
            if (err != OK) {
                err = OK; // ignore failure
            }
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            // TODO: do we need to support android.generic layering? webrtc layering is
            // already set up in setupVPXEncoderParameters.
            break;

        default:
            break;
    }

    if (err == OK) {
        ALOGI("setupVideoEncoder succeeded");
    }

    return err;
}

// Configures OMX_IndexParamVideoIntraRefresh on the output port for the given
// refresh mode. Cyclic (or combined) mode requires "intra-refresh-CIR-mbs";
// adaptive (or combined) mode requires "intra-refresh-AIR-mbs" and
// "intra-refresh-AIR-ref". Returns INVALID_OPERATION if a required key is
// missing from |msg|, otherwise the OMX setParameter result.
status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);

    // Cyclic refresh: number of macroblocks refreshed per frame.
    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nCirMBs = mbs;
    }

    // Adaptive refresh: macroblock count plus reference parameter.
    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nAirMBs = mbs;

        int32_t ref;
        if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
            return INVALID_OPERATION;
        }
        params.nAirRef = ref;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoIntraRefresh,
            &params, sizeof(params));
    return err;
}

// Converts an I-frame interval (seconds) and frame rate into the OMX nPFrames
// value (P frames between consecutive I frames), accounting for B frames.
// Negative interval => a single key frame; zero interval => all key frames.
static OMX_U32 setPFramesSpacing(
        float iFramesInterval /* seconds */, int32_t frameRate, uint32_t BFramesSpacing = 0) {
    // BFramesSpacing is the number of B frames between I/P frames
    // PFramesSpacing (the value to be returned) is the number of P frames between I frames
    //
    // keyFrameInterval = ((PFramesSpacing + 1) * BFramesSpacing) + PFramesSpacing + 1
    //                                        ^^^                     ^^^             ^^^
    //                          number of B frames                number of P     I frame
    //
    //                 = (PFramesSpacing + 1) * (BFramesSpacing + 1)
    //
    // E.g.
    //      I   P   I  : I-interval: 8, nPFrames 1, nBFrames 3
    //       BBB BBB

    if (iFramesInterval < 0) { // just 1 key frame
        return 0xFFFFFFFE; // don't use maxint as key-frame-interval calculation will add 1
    } else if (iFramesInterval == 0) { // just key frames
        return 0;
    }

    // round down as key-frame-interval is an upper limit
    uint32_t keyFrameInterval = uint32_t(frameRate * iFramesInterval);
    OMX_U32 ret = keyFrameInterval / (BFramesSpacing + 1);
    return ret > 0 ? ret - 1 : 0;
}

// Reads "bitrate-mode" from |msg|; defaults to variable bitrate when absent.
static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
    int32_t tmp;
    if (!msg->findInt32("bitrate-mode", &tmp)) {
        return OMX_Video_ControlRateVariable;
    }

    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
}

// Configures the MPEG-4 encoder (OMX_IndexParamVideoMpeg4), then the target
// bitrate and error-correction parameters. Requires "bitrate",
// "i-frame-interval" and "frame-rate" in |msg|; a "profile" key additionally
// requires "level" and is verified against the component.
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either a float or an int32 entry.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    // get/modify/set: start from the component's current settings.
    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nBFrames = 0;
    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, mpeg4type.nBFrames);
    if (mpeg4type.nPFrames == 0) {
        // all-keyframe stream: only I pictures allowed
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// Configures the H.263 encoder (OMX_IndexParamVideoH263) analogously to the
// MPEG-4 path: GOP structure, optional verified profile/level, then bitrate
// and error correction.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either a float or an int32 entry.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nBFrames = 0;
    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h263type.nBFrames);
    if (h263type.nPFrames == 0) {
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// Picks the lowest AVC level whose MB-rate / MB-count / dimension / bitrate
// limits accommodate the given stream. Bitrate is first normalized to a
// baseline/main-profile kbps equivalent. (Continues below.)
// static
int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
        int width, int height, int rate, int bitrate,
        OMX_VIDEO_AVCPROFILETYPE profile) {
    // convert bitrate to main/baseline profile kbps equivalent
    switch (profile) {
        case OMX_VIDEO_AVCProfileHigh10:
            bitrate = divUp(bitrate, 3000); break;
        case OMX_VIDEO_AVCProfileHigh:
            bitrate = divUp(bitrate, 1250); break;
        default:
            bitrate = divUp(bitrate, 1000); break;
    }

    // convert size and rate to MBs
    width = divUp(width, 16);
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;
    int maxDimension = max(width, height);

    static const int limits[][5] = {
        /*    MBps      MB   dim  bitrate        level */
        {     1485,     99,   28,     64, OMX_VIDEO_AVCLevel1  },
        {     1485,     99,   28,    128, OMX_VIDEO_AVCLevel1b },
        {     3000,    396,   56,    192, OMX_VIDEO_AVCLevel11 },
        {     6000,    396,   56,    384, OMX_VIDEO_AVCLevel12 },
        {    11880,    396,   56,    768, OMX_VIDEO_AVCLevel13 },
        {    11880,    396,   56,   2000, OMX_VIDEO_AVCLevel2  },
        {    19800,    792,   79,   4000, OMX_VIDEO_AVCLevel21 },
        {    20250,   1620,  113,   4000, OMX_VIDEO_AVCLevel22 },
        {    40500,   1620,  113,  10000, OMX_VIDEO_AVCLevel3  },
        {
108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 4201 { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 4202 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 4203 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 4204 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 4205 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 4206 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 4207 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 4208 }; 4209 4210 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 4211 const int (&limit)[5] = limits[i]; 4212 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 4213 && bitrate <= limit[3]) { 4214 return limit[4]; 4215 } 4216 } 4217 return 0; 4218} 4219 4220status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 4221 int32_t bitrate; 4222 float iFrameInterval; 4223 if (!msg->findInt32("bitrate", &bitrate) 4224 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) { 4225 return INVALID_OPERATION; 4226 } 4227 4228 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4229 4230 float frameRate; 4231 if (!msg->findFloat("frame-rate", &frameRate)) { 4232 int32_t tmp; 4233 if (!msg->findInt32("frame-rate", &tmp)) { 4234 return INVALID_OPERATION; 4235 } 4236 frameRate = (float)tmp; 4237 } 4238 4239 status_t err = OK; 4240 int32_t intraRefreshMode = 0; 4241 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 4242 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 4243 if (err != OK) { 4244 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4245 err, intraRefreshMode); 4246 return err; 4247 } 4248 } 4249 4250 OMX_VIDEO_PARAM_AVCTYPE h264type; 4251 InitOMXParams(&h264type); 4252 h264type.nPortIndex = kPortIndexOutput; 4253 4254 err = mOMX->getParameter( 4255 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4256 4257 if (err != OK) { 4258 return err; 4259 } 4260 4261 h264type.nAllowedPictureTypes = 4262 OMX_VIDEO_PictureTypeI | 
OMX_VIDEO_PictureTypeP; 4263 4264 int32_t profile; 4265 if (msg->findInt32("profile", &profile)) { 4266 int32_t level; 4267 if (!msg->findInt32("level", &level)) { 4268 return INVALID_OPERATION; 4269 } 4270 4271 err = verifySupportForProfileAndLevel(profile, level); 4272 4273 if (err != OK) { 4274 return err; 4275 } 4276 4277 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4278 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4279 } else { 4280 // Use largest supported profile for AVC recording if profile is not specified. 4281 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4282 for (OMX_VIDEO_AVCPROFILETYPE profile : { 4283 OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCProfileMain }) { 4284 if (verifySupportForProfileAndLevel(profile, 0) == OK) { 4285 h264type.eProfile = profile; 4286 break; 4287 } 4288 } 4289 } 4290 4291 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4292 asString(h264type.eProfile), asString(h264type.eLevel)); 4293 4294 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4295 h264type.nSliceHeaderSpacing = 0; 4296 h264type.bUseHadamard = OMX_TRUE; 4297 h264type.nRefFrames = 1; 4298 h264type.nBFrames = 0; 4299 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4300 if (h264type.nPFrames == 0) { 4301 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4302 } 4303 h264type.nRefIdx10ActiveMinus1 = 0; 4304 h264type.nRefIdx11ActiveMinus1 = 0; 4305 h264type.bEntropyCodingCABAC = OMX_FALSE; 4306 h264type.bWeightedPPrediction = OMX_FALSE; 4307 h264type.bconstIpred = OMX_FALSE; 4308 h264type.bDirect8x8Inference = OMX_FALSE; 4309 h264type.bDirectSpatialTemporal = OMX_FALSE; 4310 h264type.nCabacInitIdc = 0; 4311 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4312 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4313 h264type.nSliceHeaderSpacing = 0; 4314 h264type.bUseHadamard = OMX_TRUE; 4315 h264type.nRefFrames = 2; 4316 h264type.nBFrames = 1; 4317 
h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4318 h264type.nAllowedPictureTypes = 4319 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4320 h264type.nRefIdx10ActiveMinus1 = 0; 4321 h264type.nRefIdx11ActiveMinus1 = 0; 4322 h264type.bEntropyCodingCABAC = OMX_TRUE; 4323 h264type.bWeightedPPrediction = OMX_TRUE; 4324 h264type.bconstIpred = OMX_TRUE; 4325 h264type.bDirect8x8Inference = OMX_TRUE; 4326 h264type.bDirectSpatialTemporal = OMX_TRUE; 4327 h264type.nCabacInitIdc = 1; 4328 } 4329 4330 if (h264type.nBFrames != 0) { 4331 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4332 } 4333 4334 h264type.bEnableUEP = OMX_FALSE; 4335 h264type.bEnableFMO = OMX_FALSE; 4336 h264type.bEnableASO = OMX_FALSE; 4337 h264type.bEnableRS = OMX_FALSE; 4338 h264type.bFrameMBsOnly = OMX_TRUE; 4339 h264type.bMBAFF = OMX_FALSE; 4340 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4341 4342 err = mOMX->setParameter( 4343 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4344 4345 if (err != OK) { 4346 return err; 4347 } 4348 4349 // TRICKY: if we are enabling temporal layering as well, some codecs may not support layering 4350 // when B-frames are enabled. Detect this now so we can disable B frames if temporal layering 4351 // is preferred. 
4352 AString tsSchema; 4353 int32_t preferBFrames = (int32_t)false; 4354 if (msg->findString("ts-schema", &tsSchema) 4355 && (!msg->findInt32("android._prefer-b-frames", &preferBFrames) || !preferBFrames)) { 4356 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layering; 4357 InitOMXParams(&layering); 4358 layering.nPortIndex = kPortIndexOutput; 4359 if (mOMX->getParameter( 4360 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 4361 &layering, sizeof(layering)) == OK 4362 && layering.eSupportedPatterns 4363 && layering.nBLayerCountMax == 0) { 4364 h264type.nBFrames = 0; 4365 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4366 h264type.nAllowedPictureTypes &= ~OMX_VIDEO_PictureTypeB; 4367 ALOGI("disabling B-frames"); 4368 err = mOMX->setParameter( 4369 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4370 4371 if (err != OK) { 4372 return err; 4373 } 4374 } 4375 } 4376 4377 return configureBitrate(bitrate, bitrateMode); 4378} 4379 4380status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4381 int32_t bitrate; 4382 float iFrameInterval; 4383 if (!msg->findInt32("bitrate", &bitrate) 4384 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) { 4385 return INVALID_OPERATION; 4386 } 4387 4388 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4389 4390 float frameRate; 4391 if (!msg->findFloat("frame-rate", &frameRate)) { 4392 int32_t tmp; 4393 if (!msg->findInt32("frame-rate", &tmp)) { 4394 return INVALID_OPERATION; 4395 } 4396 frameRate = (float)tmp; 4397 } 4398 4399 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4400 InitOMXParams(&hevcType); 4401 hevcType.nPortIndex = kPortIndexOutput; 4402 4403 status_t err = OK; 4404 err = mOMX->getParameter( 4405 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4406 if (err != OK) { 4407 return err; 4408 } 4409 4410 int32_t profile; 4411 if (msg->findInt32("profile", &profile)) { 4412 int32_t level; 4413 if 
(!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);
        if (err != OK) {
            return err;
        }

        hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile);
        hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level);
    }
    // TODO: finer control?
    hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;

    err = mOMX->setParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
    if (err != OK) {
        return err;
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Configures the VP8/VP9 encoder. Parses an optional "ts-schema" string
// ("webrtc.vp8.N-layer" or "android.generic.N[+M]") into a temporal layer
// count and per-layer bitrate ratios; key-frame interval and CBR quantizer
// bounds are applied when relevant. Failure to set the extended VP8
// parameters is non-fatal; the base bitrate is configured regardless.
status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval = 0;
    size_t tsLayers = 0;
    OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern =
        OMX_VIDEO_VPXTemporalLayerPatternNone;
    // Cumulative bitrate percentages per layer, indexed by [layerCount - 1].
    static const uint32_t kVp8LayerRateAlloction
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = {
        {100, 100, 100},  // 1 layer
        { 60, 100, 100},  // 2 layers {60%, 40%}
        { 40,  60, 100},  // 3 layers {40%, 20%, 40%}
    };
    if (!msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }
    msg->findAsFloat("i-frame-interval", &iFrameInterval);

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either a float or an int32 entry.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    AString tsSchema;
    if (msg->findString("ts-schema", &tsSchema)) {
        unsigned int numLayers = 0;
        unsigned int numBLayers = 0;
        int tags;
        char dummy;
        if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
                && numLayers > 0) {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            tsLayers = numLayers;
        } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
                        &numLayers, &dummy, &numBLayers, &dummy))
                && (tags == 1 || (tags == 3 && dummy == '+'))
                && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            // VPX does not have a concept of B-frames, so just count all layers
            tsLayers = numLayers + numBLayers;
        } else {
            ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
        }
        tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS);
    }

    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
    InitOMXParams(&vp8type);
    vp8type.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
            &vp8type, sizeof(vp8type));

    if (err == OK) {
        if (iFrameInterval > 0) {
            vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;
        }
        vp8type.eTemporalPattern = pattern;
        vp8type.nTemporalLayerCount = tsLayers;
        if (tsLayers > 0) {
            for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) {
                vp8type.nTemporalLayerBitrateRatio[i] =
                    kVp8LayerRateAlloction[tsLayers - 1][i];
            }
        }
        if (bitrateMode == OMX_Video_ControlRateConstant) {
            vp8type.nMinQuantizer = 2;
            vp8type.nMaxQuantizer = 63;
        }

        err = mOMX->setParameter(
                mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                &vp8type, sizeof(vp8type));
        if (err != OK) {
            // best-effort: extended parameters are optional
            ALOGW("Extended VP8 parameters set failed: %d", err);
        }
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Enumerates the component's supported profile/level pairs (up to
// kMaxIndicesToCheck + 1 entries) and returns OK when |profile| matches and
// |level| does not exceed the advertised level; otherwise ERROR_UNSUPPORTED.
status_t ACodec::verifySupportForProfileAndLevel(
        int32_t profile, int32_t level) {
    OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        params.nProfileIndex = index;
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
                sizeof(params));

        if (err != OK) {
            // components return an error past the last supported index
            return err;
        }

        int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
        int32_t supportedLevel = static_cast<int32_t>(params.eLevel);

        if (profile == supportedProfile && level <= supportedLevel) {
            return OK;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                    mComponentName.c_str(), index,
                    params.eProfile, params.eLevel);
        }
    }
    return ERROR_UNSUPPORTED;
}

// Sets the rate-control mode and target bitrate on the output port.
status_t ACodec::configureBitrate(
        int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
    InitOMXParams(&bitrateType);
    bitrateType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));

    if (err != OK) {
        return err;
    }

    bitrateType.eControlRate = bitrateMode;
    bitrateType.nTargetBitrate = bitrate;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));
}

// Enables resync markers (spacing 256) on the output port; HEC, data
// partitioning and RVLC stay off. The whole feature is optional, so a failed
// getParameter is treated as success.
status_t ACodec::setupErrorCorrectionParameters() {
    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
    InitOMXParams(&errorCorrectionType);
    errorCorrectionType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));

    if (err != OK) {
        return OK;  // Optional feature. Ignore this failure
    }

    errorCorrectionType.bEnableHEC = OMX_FALSE;
    errorCorrectionType.bEnableResync = OMX_TRUE;
    errorCorrectionType.nResynchMarkerSpacing = 256;
    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
    errorCorrectionType.bEnableRVLC = OMX_FALSE;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));
}

// Applies frame size (and, for the input port, compression format / frame
// rate and a minimum buffer size) to the given port's definition. Negative
// |frameRate| leaves the port's xFramerate untouched.
status_t ACodec::setVideoFormatOnPort(
        OMX_U32 portIndex,
        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
        float frameRate) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (portIndex == kPortIndexInput) {
        // XXX Need a (much) better heuristic to compute input buffer sizes.
        const size_t X = 64 * 1024;
        if (def.nBufferSize < X) {
            def.nBufferSize = X;
        }
    }

    if (def.eDomain != OMX_PortDomainVideo) {
        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
        return FAILED_TRANSACTION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    if (portIndex == kPortIndexInput) {
        video_def->eCompressionFormat = compressionFormat;
        video_def->eColorFormat = OMX_COLOR_FormatUnused;
        if (frameRate >= 0) {
            // xFramerate is in Q16 fixed point
            video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
        }
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    return err;
}

// Enables native (graphic) buffers on the output port when a native window is
// attached; otherwise disables them (that result is intentionally ignored).
status_t ACodec::initNativeWindow() {
    if (mNativeWindow != NULL) {
        return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
    }

    mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
    return OK;
}

// Counts buffers on |portIndex| currently held by the OMX component.
size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            ++n;
        }
    }

    return n;
}

// Counts output buffers currently held by the native window.
size_t ACodec::countBuffersOwnedByNativeWindow() const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ++n;
        }
    }

    return n;
}

// Dequeues output buffers back from the native window until only the
// undequeued minimum remains (or dequeueing fails).
void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
    if (mNativeWindow == NULL) {
        return;
    }

    while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
            && dequeueBufferFromNativeWindow() != NULL) {
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}

// True iff every buffer on |portIndex| is owned by us or the native window.
bool ACodec::allYourBuffersAreBelongToUs(
        OMX_U32 portIndex) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mStatus != BufferInfo::OWNED_BY_US
                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("[%s] Buffer %u on port %u still has status %d",
                    mComponentName.c_str(),
                    info->mBufferID, portIndex, info->mStatus);
            return false;
        }
    }

    return true;
}

// Convenience overload covering both ports.
bool ACodec::allYourBuffersAreBelongToUs() {
    return allYourBuffersAreBelongToUs(kPortIndexInput)
        && allYourBuffersAreBelongToUs(kPortIndexOutput);
}

// Queues a message for redelivery once the current state can handle it.
void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}

// Redelivers all deferred messages. The queue is swapped out first so that
// messages deferred again during processing are not re-processed in this pass.
void ACodec::processDeferredMessages() {
    List<sp<AMessage> > queue = mDeferredQueue;
    mDeferredQueue.clear();

    List<sp<AMessage> >::iterator it = queue.begin();
    while (it != queue.end()) {
        onMessageReceived(*it++);
    }
}

// Fills in a MediaImage2 plane layout for the common YUV420 variants
// (planar, packed-planar, semi-planar, packed-semi-planar, and YV12 on
// native buffers) from the port's stride/slice-height. Returns false for any
// other color format or for insane stride/slice values.
// static
bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // YV12 on gralloc buffers: 16-aligned strides, V plane first.
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            // I420: U plane follows Y, V plane follows U; chroma stride is half luma.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            TRESPASS();
    }
    return true;
}

// Queries the component's describeColorFormat extension (v1 preferred, then
// v2) for the MediaImage layout; falls back to the default YUV420
// description when neither extension yields a known image type.
// static
bool ACodec::describeColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        DescribeColorFormat2Params &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat",
            &describeColorFormatIndex) == OK) {
        DescribeColorFormatParams describeParamsV1(describeParams);
        if (omx->getParameter(
                node, describeColorFormatIndex,
                &describeParamsV1, sizeof(describeParamsV1)) == OK) {
            describeParams.initFromV1(describeParamsV1);
            return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        }
    } else if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
            && omx->getParameter(
                    node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
        return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    }

    return describeDefaultColorFormat(describeParams);
}

// Determines whether |colorFormat| is a flexible-YUV420 equivalent by probing
// the component's color-format description with dummy dimensions.
// (Continues below.)
// static
bool ACodec::isFlexibleColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
    DescribeColorFormat2Params describeParams;
    InitOMXParams(&describeParams);
    describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
    // reasonable dummy values
    describeParams.nFrameWidth = 128;
    describeParams.nFrameHeight = 128;
    describeParams.nStride = 128;
    describeParams.nSliceHeight = 128;
    describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;

    CHECK(flexibleEquivalent != NULL);

    if (!describeColorFormat(omx, node, describeParams)) {
        return false;
    }

    const MediaImage2 &img = describeParams.sMediaImage;
    if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
        if (img.mNumPlanes != 3
                || img.mPlane[img.Y].mHorizSubsampling != 1
                || img.mPlane[img.Y].mVertSubsampling != 1) {
            return false;
        }

        // YUV 420
        if (img.mPlane[img.U].mHorizSubsampling == 2
                && img.mPlane[img.U].mVertSubsampling == 2
                && img.mPlane[img.V].mHorizSubsampling == 2
                &&
img.mPlane[img.V].mVertSubsampling == 2) { 4909 // possible flexible YUV420 format 4910 if (img.mBitDepth <= 8) { 4911 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible; 4912 return true; 4913 } 4914 } 4915 } 4916 return false; 4917} 4918 4919status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4920 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4921 OMX_PARAM_PORTDEFINITIONTYPE def; 4922 InitOMXParams(&def); 4923 def.nPortIndex = portIndex; 4924 4925 status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4926 if (err != OK) { 4927 return err; 4928 } 4929 4930 if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) { 4931 ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex); 4932 return BAD_VALUE; 4933 } 4934 4935 switch (def.eDomain) { 4936 case OMX_PortDomainVideo: 4937 { 4938 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 4939 switch ((int)videoDef->eCompressionFormat) { 4940 case OMX_VIDEO_CodingUnused: 4941 { 4942 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 4943 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 4944 4945 notify->setInt32("stride", videoDef->nStride); 4946 notify->setInt32("slice-height", videoDef->nSliceHeight); 4947 notify->setInt32("color-format", videoDef->eColorFormat); 4948 4949 if (mNativeWindow == NULL) { 4950 DescribeColorFormat2Params describeParams; 4951 InitOMXParams(&describeParams); 4952 describeParams.eColorFormat = videoDef->eColorFormat; 4953 describeParams.nFrameWidth = videoDef->nFrameWidth; 4954 describeParams.nFrameHeight = videoDef->nFrameHeight; 4955 describeParams.nStride = videoDef->nStride; 4956 describeParams.nSliceHeight = videoDef->nSliceHeight; 4957 describeParams.bUsingNativeBuffers = OMX_FALSE; 4958 4959 if (describeColorFormat(mOMX, mNode, describeParams)) { 4960 notify->setBuffer( 4961 "image-data", 4962 ABuffer::CreateAsCopy( 4963 
&describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        // if the component has no crop config, default to the full frame
                        if (mOMX->getConfig(
                                    mNode,
                                    (portIndex == kPortIndexOutput ?
                                            OMX_IndexConfigCommonOutputCrop :
                                            OMX_IndexConfigCommonInputCrop),
                                    &rect, sizeof(rect)) != OK) {
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        // reject crop rectangles that extend outside the frame
                        if (rect.nLeft < 0 ||
                            rect.nTop < 0 ||
                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
                                    rect.nLeft, rect.nTop,
                                    rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        // format message uses inclusive right/bottom coordinates
                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                        (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
                    } else {
                        // encoder input port
                        (void)getInputColorAspectsForVideoEncoder(notify);
                        if (mConfigFormat->contains("hdr-static-info")) {
                            (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
                        }
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    // report the temporal-layering schema for VPx encoders
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    // NOTE: shadows the outer |err| on purpose — failure here is non-fatal
                    status_t err = mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        AString tsSchema = "none";
                        if (vp8type.eTemporalPattern
                                == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
                            switch (vp8type.nTemporalLayerCount) {
                                case 1:
                                {
                                    tsSchema = "webrtc.vp8.1-layer";
                                    break;
                                }
                                case 2:
                                {
                                    tsSchema = "webrtc.vp8.2-layer";
                                    break;
                                }
                                case 3:
                                {
                                    tsSchema = "webrtc.vp8.3-layer";
                                    break;
                                }
                                default:
                                {
                                    break;
                                }
                            }
                        }
                        notify->setString("ts-schema", tsSchema);
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
                        // should be CodingUnused
                        ALOGE("Raw port video compression format is %s(%d)",
                                asString(videoDef->eCompressionFormat),
                                videoDef->eCompressionFormat);
                        return BAD_VALUE;
                    }
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                                videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    uint32_t intraRefreshPeriod = 0;
                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
                            && intraRefreshPeriod > 0) {
                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // only interleaved linear PCM (or mono) is supported downstream
                    if (params.nChannels <= 0
                            || (params.nChannels != 1 && !params.bInterleaved)
                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
                        ALOGE("unsupported PCM port: %u channels%s, %u-bit",
                                params.nChannels,
                                params.bInterleaved ? " interleaved" : "",
                                params.nBitPerSample);
                        return FAILED_TRANSACTION;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);

                    // map (numerical-data, bits-per-sample) to an AudioEncoding;
                    // anything other than u8 / f32 / s16 is rejected
                    AudioEncoding encoding = kAudioEncodingPcm16bit;
                    if (params.eNumData == OMX_NumericalDataUnsigned
                            && params.nBitPerSample == 8u) {
                        encoding = kAudioEncodingPcm8bit;
                    } else if (params.eNumData == OMX_NumericalDataFloat
                            && params.nBitPerSample == 32u) {
                        encoding = kAudioEncodingPcmFloat;
                    } else if (params.nBitPerSample != 16u
                            || params.eNumData != OMX_NumericalDataSigned) {
                        ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ",
                                asString(params.eNumData), params.eNumData,
                                asString(params.ePCMMode), params.ePCMMode);
                        return FAILED_TRANSACTION;
                    }
                    notify->setInt32("pcm-encoding", encoding);

                    if (mChannelMaskPresent) {
                        notify->setInt32("channel-mask", mChannelMask);
                    }
                    break;
                }

                case OMX_AUDIO_CodingAAC:
                {
                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAMR:
                {
                    OMX_AUDIO_PARAM_AMRTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // AMR is always mono; band mode selects narrow-band vs wide-band
                    notify->setInt32("channel-count", 1);
                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
                        notify->setInt32("sample-rate", 16000);
                    } else {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
                        notify->setInt32("sample-rate", 8000);
                    }
                    break;
                }

                case OMX_AUDIO_CodingFLAC:
                {
                    OMX_AUDIO_PARAM_FLACTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingMP3:
                {
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingVORBIS:
                {
                    OMX_AUDIO_PARAM_VORBISTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidEAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidOPUS:
                {
                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingG711:
                {
                    // G.711 components use the generic PCM-mode parameter
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    const char *mime = NULL;
                    if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
                    } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
                    } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
                        mime = MEDIA_MIMETYPE_AUDIO_RAW;
                    }
                    notify->setString("mime", mime);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate",
params.nSamplingRate); 5339 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5340 break; 5341 } 5342 5343 case OMX_AUDIO_CodingGSMFR: 5344 { 5345 OMX_AUDIO_PARAM_PCMMODETYPE params; 5346 InitOMXParams(¶ms); 5347 params.nPortIndex = portIndex; 5348 5349 err = mOMX->getParameter( 5350 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5351 if (err != OK) { 5352 return err; 5353 } 5354 5355 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5356 notify->setInt32("channel-count", params.nChannels); 5357 notify->setInt32("sample-rate", params.nSamplingRate); 5358 break; 5359 } 5360 5361 default: 5362 ALOGE("Unsupported audio coding: %s(%d)\n", 5363 asString(audioDef->eEncoding), audioDef->eEncoding); 5364 return BAD_TYPE; 5365 } 5366 break; 5367 } 5368 5369 default: 5370 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5371 return BAD_TYPE; 5372 } 5373 5374 return OK; 5375} 5376 5377void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5378 // aspects are normally communicated in ColorAspects 5379 int32_t range, standard, transfer; 5380 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5381 5382 // if some aspects are unspecified, use dataspace fields 5383 if (range != 0) { 5384 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5385 } 5386 if (standard != 0) { 5387 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5388 } 5389 if (transfer != 0) { 5390 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5391 } 5392 5393 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5394 if (range != 0) { 5395 mOutputFormat->setInt32("color-range", range); 5396 } 5397 if (standard != 0) { 5398 mOutputFormat->setInt32("color-standard", standard); 5399 } 5400 if (transfer != 0) { 5401 mOutputFormat->setInt32("color-transfer", transfer); 5402 } 5403 5404 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5405 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5406 dataSpace, 5407 aspects.mRange, asString(aspects.mRange), 5408 aspects.mPrimaries, asString(aspects.mPrimaries), 5409 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5410 aspects.mTransfer, asString(aspects.mTransfer), 5411 range, asString((ColorRange)range), 5412 standard, asString((ColorStandard)standard), 5413 transfer, asString((ColorTransfer)transfer)); 5414} 5415 5416void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5417 // store new output format, at the same time mark that this is no longer the first frame 5418 mOutputFormat = mBaseOutputFormat->dup(); 5419 5420 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5421 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5422 return; 5423 } 5424 5425 if (expectedFormat != NULL) { 5426 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5427 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5428 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5429 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5430 mComponentName.c_str(), 5431 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5432 } 5433 } 5434 5435 if (!mIsVideo && !mIsEncoder) { 5436 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5437 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5438 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5439 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5440 5441 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5442 if (mConverter[kPortIndexOutput] != NULL) { 5443 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5444 } 5445 } 5446 5447 if (mTunneled) { 5448 sendFormatChange(); 5449 } 5450} 5451 5452void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> ¬ify) { 5453 AString mime; 5454 CHECK(mOutputFormat->findString("mime", &mime)); 5455 5456 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) { 5457 // notify renderer of the crop change and dataspace change 5458 // NOTE: native window uses extended right-bottom coordinate 5459 int32_t left, top, right, bottom; 5460 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) { 5461 notify->setRect("crop", left, top, right + 1, bottom + 1); 5462 } 5463 5464 int32_t dataSpace; 5465 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) { 5466 notify->setInt32("dataspace", dataSpace); 5467 } 5468 } 5469} 5470 5471void ACodec::sendFormatChange() { 5472 AString mime; 5473 CHECK(mOutputFormat->findString("mime", &mime)); 5474 5475 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 5476 int32_t channelCount, sampleRate; 5477 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 5478 CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate)); 5479 if (mSampleRate != 0 && sampleRate != 0) { 5480 mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate; 5481 mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate; 5482 mSampleRate = sampleRate; 5483 } 5484 if (mSkipCutBuffer != NULL) { 5485 size_t prevbufsize = mSkipCutBuffer->size(); 5486 if (prevbufsize != 0) { 5487 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 5488 } 5489 } 5490 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 5491 } 5492 5493 sp<AMessage> notify = mNotify->dup(); 5494 notify->setInt32("what", kWhatOutputFormatChanged); 5495 notify->setMessage("format", mOutputFormat); 5496 notify->post(); 5497 5498 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 5499 mLastOutputFormat = mOutputFormat; 5500} 5501 5502void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 5503 
sp<AMessage> notify = mNotify->dup(); 5504 notify->setInt32("what", CodecBase::kWhatError); 5505 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 5506 5507 if (internalError == UNKNOWN_ERROR) { // find better error code 5508 const status_t omxStatus = statusFromOMXError(error); 5509 if (omxStatus != 0) { 5510 internalError = omxStatus; 5511 } else { 5512 ALOGW("Invalid OMX error %#x", error); 5513 } 5514 } 5515 5516 mFatalError = true; 5517 5518 notify->setInt32("err", internalError); 5519 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 5520 notify->post(); 5521} 5522 5523//////////////////////////////////////////////////////////////////////////////// 5524 5525ACodec::PortDescription::PortDescription() { 5526} 5527 5528status_t ACodec::requestIDRFrame() { 5529 if (!mIsEncoder) { 5530 return ERROR_UNSUPPORTED; 5531 } 5532 5533 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5534 InitOMXParams(¶ms); 5535 5536 params.nPortIndex = kPortIndexOutput; 5537 params.IntraRefreshVOP = OMX_TRUE; 5538 5539 return mOMX->setConfig( 5540 mNode, 5541 OMX_IndexConfigVideoIntraVOPRefresh, 5542 ¶ms, 5543 sizeof(params)); 5544} 5545 5546void ACodec::PortDescription::addBuffer( 5547 IOMX::buffer_id id, const sp<ABuffer> &buffer, 5548 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) { 5549 mBufferIDs.push_back(id); 5550 mBuffers.push_back(buffer); 5551 mHandles.push_back(handle); 5552 mMemRefs.push_back(memRef); 5553} 5554 5555size_t ACodec::PortDescription::countBuffers() { 5556 return mBufferIDs.size(); 5557} 5558 5559IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5560 return mBufferIDs.itemAt(index); 5561} 5562 5563sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5564 return mBuffers.itemAt(index); 5565} 5566 5567sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const { 5568 return mHandles.itemAt(index); 5569} 5570 5571sp<RefBase> 
ACodec::PortDescription::memRefAt(size_t index) const { 5572 return mMemRefs.itemAt(index); 5573} 5574 5575//////////////////////////////////////////////////////////////////////////////// 5576 5577ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5578 : AState(parentState), 5579 mCodec(codec) { 5580} 5581 5582ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5583 OMX_U32 /* portIndex */) { 5584 return KEEP_BUFFERS; 5585} 5586 5587bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5588 switch (msg->what()) { 5589 case kWhatInputBufferFilled: 5590 { 5591 onInputBufferFilled(msg); 5592 break; 5593 } 5594 5595 case kWhatOutputBufferDrained: 5596 { 5597 onOutputBufferDrained(msg); 5598 break; 5599 } 5600 5601 case ACodec::kWhatOMXMessageList: 5602 { 5603 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5604 } 5605 5606 case ACodec::kWhatOMXMessageItem: 5607 { 5608 // no need to check as we already did it for kWhatOMXMessageList 5609 return onOMXMessage(msg); 5610 } 5611 5612 case ACodec::kWhatOMXMessage: 5613 { 5614 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5615 } 5616 5617 case ACodec::kWhatSetSurface: 5618 { 5619 sp<AReplyToken> replyID; 5620 CHECK(msg->senderAwaitsResponse(&replyID)); 5621 5622 sp<RefBase> obj; 5623 CHECK(msg->findObject("surface", &obj)); 5624 5625 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5626 5627 sp<AMessage> response = new AMessage; 5628 response->setInt32("err", err); 5629 response->postReply(replyID); 5630 break; 5631 } 5632 5633 case ACodec::kWhatCreateInputSurface: 5634 case ACodec::kWhatSetInputSurface: 5635 case ACodec::kWhatSignalEndOfInputStream: 5636 { 5637 // This may result in an app illegal state exception. 
5638 ALOGE("Message 0x%x was not handled", msg->what()); 5639 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5640 return true; 5641 } 5642 5643 case ACodec::kWhatOMXDied: 5644 { 5645 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5646 ALOGE("OMX/mediaserver died, signalling error!"); 5647 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5648 break; 5649 } 5650 5651 case ACodec::kWhatReleaseCodecInstance: 5652 { 5653 ALOGI("[%s] forcing the release of codec", 5654 mCodec->mComponentName.c_str()); 5655 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5656 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5657 mCodec->mComponentName.c_str(), err); 5658 sp<AMessage> notify = mCodec->mNotify->dup(); 5659 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5660 notify->post(); 5661 break; 5662 } 5663 5664 default: 5665 return false; 5666 } 5667 5668 return true; 5669} 5670 5671bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5672 // there is a possibility that this is an outstanding message for a 5673 // codec that we have already destroyed 5674 if (mCodec->mNode == 0) { 5675 ALOGI("ignoring message as already freed component: %s", 5676 msg->debugString().c_str()); 5677 return false; 5678 } 5679 5680 IOMX::node_id nodeID; 5681 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5682 if (nodeID != mCodec->mNode) { 5683 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5684 return false; 5685 } 5686 return true; 5687} 5688 5689bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5690 sp<RefBase> obj; 5691 CHECK(msg->findObject("messages", &obj)); 5692 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5693 5694 bool receivedRenderedEvents = false; 5695 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5696 it != msgList->getList().cend(); ++it) { 5697 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 
        mCodec->handleMessage(*it);
        int32_t type;
        CHECK((*it)->findInt32("type", &type));
        if (type == omx_message::FRAME_RENDERED) {
            receivedRenderedEvents = true;
        }
    }

    if (receivedRenderedEvents) {
        // NOTE: all buffers are rendered in this case
        mCodec->notifyOfRenderedFrames();
    }
    return true;
}

// Decodes one omx_message and routes it to the matching onOMX* handler.
bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));

    switch (type) {
        case omx_message::EVENT:
        {
            int32_t event, data1, data2;
            CHECK(msg->findInt32("event", &event));
            CHECK(msg->findInt32("data1", &data1));
            CHECK(msg->findInt32("data2", &data2));

            if (event == OMX_EventCmdComplete
                    && data1 == OMX_CommandFlush
                    && data2 == (int32_t)OMX_ALL) {
                // Use of this notification is not consistent across
                // implementations. We'll drop this notification and rely
                // on flush-complete notifications on the individual port
                // indices instead.

                return true;
            }

            return onOMXEvent(
                    static_cast<OMX_EVENTTYPE>(event),
                    static_cast<OMX_U32>(data1),
                    static_cast<OMX_U32>(data2));
        }

        case omx_message::EMPTY_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            int32_t fenceFd;

            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXEmptyBufferDone(bufferID, fenceFd);
        }

        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));

            int32_t rangeOffset, rangeLength, flags, fenceFd;
            int64_t timeUs;

            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    fenceFd);
        }

        case omx_message::FRAME_RENDERED:
        {
            int64_t mediaTimeUs, systemNano;

            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
            CHECK(msg->findInt64("system_nano", &systemNano));

            return onOMXFrameRendered(
                    mediaTimeUs, systemNano);
        }

        default:
            ALOGE("Unexpected message type: %d", type);
            return false;
    }
}

bool ACodec::BaseState::onOMXFrameRendered(
        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
    // ignore outside of Executing and PortSettingsChanged states
    return true;
}

// Handles generic OMX events: decodes dataspace changes, turns OMX errors
// into client errors, and lets concrete states handle everything else
// (continues in the next block).
bool ACodec::BaseState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    if (event == OMX_EventDataSpaceChanged) {
        // data2 packs the color aspects: range(31:24) primaries(23:16)
        // matrix-coeffs(15:8) transfer(7:0)
        ColorAspects aspects;
        aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
        aspects.mPrimaries =
(ColorAspects::Primaries)((data2 >> 16) & 0xFF);
        aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF);
        aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF);

        mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
        return true;
    }

    if (event != OMX_EventError) {
        ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
                mCodec->mComponentName.c_str(), event, data1, data2);

        // not handled here; let the concrete state deal with it
        return false;
    }

    ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);

    // verify OMX component sends back an error we expect.
    OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
    if (!isOMXError(omxError)) {
        ALOGW("Invalid OMX error %#x", omxError);
        omxError = OMX_ErrorUndefined;
    }
    mCodec->signalError(omxError);

    return true;
}

// The component returned an input buffer: reclaim ownership, wait out any
// fence, and either keep the buffer or resubmit it upstream depending on the
// current state's port mode.
bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
    ALOGV("[%s] onOMXEmptyBufferDone %u",
            mCodec->mComponentName.c_str(), bufferID);

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        // don't leak the fence fd even on the error path
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return false;
    }
    info->mStatus = BufferInfo::OWNED_BY_US;

    // input buffers cannot take fences, so wait for any fence now
    (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
    fenceFd = -1;

    // still save fence for completeness
    info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");

    // We're in "store-metadata-in-buffers" mode, the underlying
    // OMX component had access to data that's implicitly refcounted
    // by this "MediaBuffer" object. Now that the OMX component has
    // told us that it's done with the input buffer, we can decrement
    // the mediaBuffer's reference count.
    info->mData->setMediaBufferBase(NULL);

    PortMode mode = getPortMode(kPortIndexInput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
            postFillThisBuffer(info);
            break;

        case FREE_BUFFERS:
        default:
            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
            return false;
    }

    return true;
}

// Hands an input buffer to the upstream client (MediaCodec) to be filled;
// ownership moves to OWNED_BY_UPSTREAM until kWhatInputBufferFilled returns it.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        // no more input is accepted after EOS
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
    notify->setInt32("buffer-id", info->mBufferID);

    info->mData->meta()->clear();
    notify->setBuffer("buffer", info->mData);

    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
    reply->setInt32("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

// The client returned a (possibly filled) input buffer; validate ownership
// and, per port mode, either keep it or convert/submit it to the component
// (continues past the end of this view).
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<ABuffer> buffer;
    int32_t err = OK;
    bool eos = false;
    PortMode mode = getPortMode(kPortIndexInput);

    if (!msg->findBuffer("buffer", &buffer)) {
        /* these are unfilled buffers returned by client */
        CHECK(msg->findInt32("err", &err));

        if (err == OK) {
            /* buffers with no errors are returned on MediaCodec.flush */
            mode = KEEP_BUFFERS;
        } else {
            ALOGV("[%s] saw error %d instead of an input buffer",
                    mCodec->mComponentName.c_str(), err);
            eos = true;
        }

buffer.clear();
    }

    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // flushing/stopping: just remember EOS, don't submit anything
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                MetadataBufferType metaType = mCodec->mInputMetadataType;
                int32_t isCSD = 0;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    if (mCodec->mIsLegacyVP9Decoder) {
                        // legacy VP9 decoders take no codec-specific data
                        ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                        postFillThisBuffer(info);
                        break;
                    }
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                    // CSD is sent as plain data even in metadata mode
                    metaType = kMetadataBufferTypeInvalid;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                if (buffer != info->mCodecData) {
                    // client buffer differs from the codec-facing buffer:
                    // run it through the input converter (or a plain copy)
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                            mCodec->mComponentName.c_str(),
                            bufferID,
                            buffer.get(), info->mCodecData.get());

                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL || isCSD) {
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                         mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                         mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");

                // in metadata mode, patch the metadata struct into the codec buffer
                status_t err2 = OK;
                switch (metaType) {
                case kMetadataBufferTypeInvalid:
                    break;
#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                case kMetadataBufferTypeNativeHandleSource:
                    if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
                        VideoNativeHandleMetadata *vnhmd =
                            (VideoNativeHandleMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateNativeHandleInMeta(
                                mCodec->mNode, kPortIndexInput,
                                NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */),
                                bufferID);
                    }
                    break;
                case kMetadataBufferTypeANWBuffer:
                    if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
                        VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateGraphicBufferInMeta(
                                mCodec->mNode, kPortIndexInput,
                                new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */),
                                bufferID);
                    }
                    break;
#endif
                default:
                    ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode",
                            asString(metaType), info->mCodecData->size(),
                            sizeof(buffer_handle_t) * 8);
                    err2 = ERROR_UNSUPPORTED;
                    break;
                }

                if (err2 == OK) {
                    err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        info->mCodecData->size(),
                        flags,
                        timeUs,
                        info->mFenceFd);
                }
                // fence ownership transferred to the component (or lost on error)
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                            mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
mCodec->mInputEOSResult = err; 6100 } 6101 } else if (!mCodec->mPortEOS[kPortIndexInput]) { 6102 if (err != OK && err != ERROR_END_OF_STREAM) { 6103 ALOGV("[%s] Signalling EOS on the input port due to error %d", 6104 mCodec->mComponentName.c_str(), err); 6105 } else { 6106 ALOGV("[%s] Signalling EOS on the input port", 6107 mCodec->mComponentName.c_str()); 6108 } 6109 6110 ALOGV("[%s] calling emptyBuffer %u signalling EOS", 6111 mCodec->mComponentName.c_str(), bufferID); 6112 6113 info->checkReadFence("onInputBufferFilled"); 6114 status_t err2 = mCodec->mOMX->emptyBuffer( 6115 mCodec->mNode, 6116 bufferID, 6117 0, 6118 0, 6119 OMX_BUFFERFLAG_EOS, 6120 0, 6121 info->mFenceFd); 6122 info->mFenceFd = -1; 6123 if (err2 != OK) { 6124 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 6125 return; 6126 } 6127 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6128 6129 mCodec->mPortEOS[kPortIndexInput] = true; 6130 mCodec->mInputEOSResult = err; 6131 } 6132 break; 6133 } 6134 6135 case FREE_BUFFERS: 6136 break; 6137 6138 default: 6139 ALOGE("invalid port mode: %d", mode); 6140 break; 6141 } 6142} 6143 6144void ACodec::BaseState::getMoreInputDataIfPossible() { 6145 if (mCodec->mPortEOS[kPortIndexInput]) { 6146 return; 6147 } 6148 6149 BufferInfo *eligible = NULL; 6150 6151 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 6152 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6153 6154#if 0 6155 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { 6156 // There's already a "read" pending. 
                    return;
                }
#endif

                if (info->mStatus == BufferInfo::OWNED_BY_US) {
                    eligible = info;
                }
            }

            if (eligible == NULL) {
                return;
            }

            postFillThisBuffer(eligible);
        }

        // Handles the OMX FillBufferDone callback for an output buffer: validates
        // ownership, attaches the release fence, then either recycles the buffer to
        // the component (empty, non-EOS) or forwards it downstream to MediaCodec.
        // Returns true if the event was consumed.
        bool ACodec::BaseState::onOMXFillBufferDone(
                IOMX::buffer_id bufferID,
                size_t rangeOffset, size_t rangeLength,
                OMX_U32 flags,
                int64_t timeUs,
                int fenceFd) {
            ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
                 mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

            ssize_t index;
            status_t err= OK;

#if TRACK_BUFFER_TIMING
            // Match this output against the emptyBuffer timestamp recorded earlier
            // and log the codec latency for this PTS.
            index = mCodec->mBufferStats.indexOfKey(timeUs);
            if (index >= 0) {
                ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
                stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

                ALOGI("frame PTS %lld: %lld",
                        timeUs,
                        stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

                mCodec->mBufferStats.removeItemsAt(index);
                stats = NULL;
            }
#endif

            BufferInfo *info =
                mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
            BufferInfo::Status status = BufferInfo::getSafeStatus(info);
            if (status != BufferInfo::OWNED_BY_COMPONENT) {
                ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
                mCodec->dumpBuffers(kPortIndexOutput);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                // We still own the incoming fence fd on this error path; close it
                // so it doesn't leak.
                if (fenceFd >= 0) {
                    ::close(fenceFd);
                }
                return true;
            }

            info->mDequeuedAt = ++mCodec->mDequeueCounter;
            info->mStatus = BufferInfo::OWNED_BY_US;

            if (info->mRenderInfo != NULL) {
                // The fence for an emptied buffer must have signaled, but there still could be queued
                // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
                // as we will soon requeue this buffer to the surface. While in theory we could still keep
                // track of buffers that are requeued to the surface, it is better to add support to the
                // buffer-queue to notify us of released buffers and their fences (in the future).
                mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
            }

            // byte buffers cannot take fences, so wait for any fence now
            if (mCodec->mNativeWindow == NULL) {
                (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
                fenceFd = -1;
            }
            info->setReadFence(fenceFd, "onOMXFillBufferDone");

            PortMode mode = getPortMode(kPortIndexOutput);

            switch (mode) {
                case KEEP_BUFFERS:
                    break;

                case RESUBMIT_BUFFERS:
                {
                    // An empty buffer without EOS (or after output EOS) carries no data
                    // for downstream — hand it straight back to the component.
                    if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                            || mCodec->mPortEOS[kPortIndexOutput])) {
                        ALOGV("[%s] calling fillBuffer %u",
                             mCodec->mComponentName.c_str(), info->mBufferID);

                        err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
                        info->mFenceFd = -1;
                        if (err != OK) {
                            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                            return true;
                        }

                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                        break;
                    }

                    // This reply message comes back to us as kWhatOutputBufferDrained
                    // once downstream is done with the buffer.
                    sp<AMessage> reply =
                        new AMessage(kWhatOutputBufferDrained, mCodec);

                    if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                        // pretend that output format has changed on the first frame (we used to do this)
                        if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                            mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                        }
                        mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                        mCodec->sendFormatChange();
                    } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) {
                        // If potentially rendering onto a surface, always save key format data (crop &
                        // data space) so that we can set it if and once the buffer is rendered.
                        mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                    }

                    if (mCodec->usingMetadataOnEncoderOutput()) {
                        // Encoder output in metadata mode: expose the native handle
                        // (if valid in this process) plus the byte range via meta.
                        native_handle_t *handle = NULL;
                        VideoNativeHandleMetadata &nativeMeta =
                            *(VideoNativeHandleMetadata *)info->mData->data();
                        if (info->mData->size() >= sizeof(nativeMeta)
                                && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                            // handle is only valid on 32-bit/mediaserver process
                            handle = NULL;
#else
                            handle = (native_handle_t *)nativeMeta.pHandle;
#endif
                        }
                        info->mData->meta()->setPointer("handle", handle);
                        info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                        info->mData->meta()->setInt32("rangeLength", rangeLength);
                    } else if (info->mData == info->mCodecData) {
                        info->mData->setRange(rangeOffset, rangeLength);
                    } else {
                        // Distinct codec/client buffers: convert the payload for the client.
                        info->mCodecData->setRange(rangeOffset, rangeLength);
                        // in this case we know that mConverter is not null
                        status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                                info->mCodecData, info->mData);
                        if (err != OK) {
                            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                            return true;
                        }
                    }
#if 0
                    if (mCodec->mNativeWindow == NULL) {
                        if (IsIDR(info->mData)) {
                            ALOGI("IDR frame");
                        }
                    }
#endif

                    if (mCodec->mSkipCutBuffer != NULL) {
                        mCodec->mSkipCutBuffer->submit(info->mData);
                    }
                    info->mData->meta()->setInt64("timeUs", timeUs);

                    // Hand the buffer downstream; it is returned via the "reply" message.
                    sp<AMessage> notify = mCodec->mNotify->dup();
                    notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
                    notify->setInt32("buffer-id", info->mBufferID);
                    notify->setBuffer("buffer", info->mData);
                    notify->setInt32("flags", flags);

                    reply->setInt32("buffer-id", info->mBufferID);

                    notify->setMessage("reply", reply);

                    notify->post();

                    info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

                    if (flags & OMX_BUFFERFLAG_EOS) {
                        ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                        // Report EOS (with the result recorded at input EOS time).
                        sp<AMessage> notify = mCodec->mNotify->dup();
                        notify->setInt32("what", CodecBase::kWhatEOS);
                        notify->setInt32("err", mCodec->mInputEOSResult);
                        notify->post();

                        mCodec->mPortEOS[kPortIndexOutput] = true;
                    }
                    break;
                }

                case FREE_BUFFERS:
                    err = mCodec->freeBuffer(kPortIndexOutput, index);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                        return true;
                    }
                    break;

                default:
                    ALOGE("Invalid port mode: %d", mode);
                    return false;
            }

            return true;
        }

        // Handles the return of an output buffer from downstream (MediaCodec/app):
        // optionally renders it to the native window, then recycles or frees it
        // according to the current port mode.
        void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
            IOMX::buffer_id bufferID;
            CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
            ssize_t index;
            BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
            BufferInfo::Status status = BufferInfo::getSafeStatus(info);
            if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
                ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
                mCodec->dumpBuffers(kPortIndexOutput);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return;
            }

            // Apply any crop change saved with this buffer, deduplicated against the
            // last crop pushed to the native window.
            android_native_rect_t crop;
            if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
                    && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
                mCodec->mLastNativeWindowCrop = crop;
                status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
                ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
            }

            // Likewise for dataspace changes.
            int32_t dataSpace;
            if (msg->findInt32("dataspace", &dataSpace)
                    && dataSpace != mCodec->mLastNativeWindowDataSpace) {
                status_t err = native_window_set_buffers_data_space(
                        mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
                mCodec->mLastNativeWindowDataSpace = dataSpace;
                ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
            }

            // Render only when a surface is attached, the client asked for it,
            // and the buffer actually carries data.
            int32_t render;
            if (mCodec->mNativeWindow != NULL
                    && msg->findInt32("render", &render) && render != 0
                    && info->mData != NULL && info->mData->size() != 0) {
                ATRACE_NAME("render");
                // The client wants this buffer to be rendered.

                // save buffers sent to the surface so we can get render time when they return
                int64_t mediaTimeUs = -1;
                info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
                if (mediaTimeUs >= 0) {
                    mCodec->mRenderTracker.onFrameQueued(
                            mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
                }

                int64_t timestampNs = 0;
                if (!msg->findInt64("timestampNs", &timestampNs)) {
                    // use media timestamp if client did not request a specific render timestamp
                    if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
                        ALOGV("using buffer PTS of %lld", (long long)timestampNs);
                        timestampNs *= 1000;
                    }
                }

                status_t err;
                err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
                ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);

                info->checkReadFence("onOutputBufferDrained before queueBuffer");
                // queueBuffer takes over the fence fd.
                err = mCodec->mNativeWindow->queueBuffer(
                        mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
                info->mFenceFd = -1;
                if (err == OK) {
                    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
                } else {
                    ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    info->mStatus = BufferInfo::OWNED_BY_US;
                    // keeping read fence as write fence to avoid clobbering
                    info->mIsReadFence = false;
                }
            } else {
                // Frame dropped (or byte-buffer mode).
                if (mCodec->mNativeWindow != NULL &&
                    (info->mData == NULL || info->mData->size() != 0)) {
                    // move read fence into write fence to avoid clobbering
                    info->mIsReadFence = false;
                    ATRACE_NAME("frame-drop");
                }
                info->mStatus = BufferInfo::OWNED_BY_US;
            }

            PortMode mode = getPortMode(kPortIndexOutput);

            switch (mode) {
                case KEEP_BUFFERS:
                {
                    // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

                    if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                        // We cannot resubmit the buffer we just rendered, dequeue
                        // the spare instead.

                        info = mCodec->dequeueBufferFromNativeWindow();
                    }
                    break;
                }

                case RESUBMIT_BUFFERS:
                {
                    if (!mCodec->mPortEOS[kPortIndexOutput]) {
                        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                            // We cannot resubmit the buffer we just rendered, dequeue
                            // the spare instead.

                            info = mCodec->dequeueBufferFromNativeWindow();
                        }

                        // info may be NULL if no spare buffer could be dequeued.
                        if (info != NULL) {
                            ALOGV("[%s] calling fillBuffer %u",
                                 mCodec->mComponentName.c_str(), info->mBufferID);
                            info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                            status_t err = mCodec->mOMX->fillBuffer(
                                    mCodec->mNode, info->mBufferID, info->mFenceFd);
                            info->mFenceFd = -1;
                            if (err == OK) {
                                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                            } else {
                                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                            }
                        }
                    }
                    break;
                }

                case FREE_BUFFERS:
                {
                    status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                    break;
                }

                default:
                    ALOGE("Invalid port mode: %d", mode);
                    return;
            }
        }

        ////////////////////////////////////////////////////////////////////////////////

        ACodec::UninitializedState::UninitializedState(ACodec *codec)
            : BaseState(codec) {
        }

        // Entering Uninitialized: drop the OMX node, its death notifier, and all
        // per-component state so the ACodec can be reused for a new component.
        void ACodec::UninitializedState::stateEntered() {
            ALOGV("Now
uninitialized");

            if (mDeathNotifier != NULL) {
                mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier);
                mDeathNotifier.clear();
            }

            mCodec->mUsingNativeWindow = false;
            mCodec->mNativeWindow.clear();
            mCodec->mNativeWindowUsageBits = 0;
            mCodec->mNode = 0;
            mCodec->mOMX.clear();
            mCodec->mQuirks = 0;
            mCodec->mFlags = 0;
            mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
            mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
            mCodec->mConverter[0].clear();
            mCodec->mConverter[1].clear();
            mCodec->mComponentName.clear();
        }

        // Message dispatch while no component is allocated. Shutdown/flush are
        // trivially acknowledged since there is nothing to tear down or flush.
        bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
            bool handled = false;

            switch (msg->what()) {
                case ACodec::kWhatSetup:
                {
                    onSetup(msg);

                    handled = true;
                    break;
                }

                case ACodec::kWhatAllocateComponent:
                {
                    onAllocateComponent(msg);
                    handled = true;
                    break;
                }

                case ACodec::kWhatShutdown:
                {
                    int32_t keepComponentAllocated;
                    CHECK(msg->findInt32(
                                "keepComponentAllocated", &keepComponentAllocated));
                    ALOGW_IF(keepComponentAllocated,
                             "cannot keep component allocated on shutdown in Uninitialized state");

                    sp<AMessage> notify = mCodec->mNotify->dup();
                    notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
                    notify->post();

                    handled = true;
                    break;
                }

                case ACodec::kWhatFlush:
                {
                    sp<AMessage> notify = mCodec->mNotify->dup();
                    notify->setInt32("what", CodecBase::kWhatFlushCompleted);
                    notify->post();

                    handled = true;
                    break;
                }

                case ACodec::kWhatReleaseCodecInstance:
                {
                    // nothing to do, as we have already signaled shutdown
                    handled = true;
                    break;
                }

                default:
                    return BaseState::onMessageReceived(msg);
            }

            return handled;
        }

        // Convenience path: allocate + configure + start in one step.
        void ACodec::UninitializedState::onSetup(
                const sp<AMessage> &msg) {
            if (onAllocateComponent(msg)
                    && mCodec->mLoadedState->onConfigureComponent(msg)) {
                mCodec->mLoadedState->onStart();
            }
        }

        // Connects to the OMX service and instantiates a component, either by
        // explicit name or by searching the codec list for the given mime/encoder.
        // On success transitions to Loaded and returns true; on failure signals an
        // error and returns false.
        bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
            ALOGV("onAllocateComponent");

            CHECK(mCodec->mNode == 0);

            OMXClient client;
            if (client.connect() != OK) {
                mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
                return false;
            }

            sp<IOMX> omx = client.interface();

            sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

            Vector<AString> matchingCodecs;

            AString mime;

            AString componentName;
            uint32_t quirks = 0;
            int32_t encoder = false;
            if (msg->findString("componentName", &componentName)) {
                // Caller named a specific component; verify it exists in the list.
                sp<IMediaCodecList> list = MediaCodecList::getInstance();
                if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
                    matchingCodecs.add(componentName);
                }
            } else {
                // Otherwise collect all candidates matching the mime type.
                CHECK(msg->findString("mime", &mime));

                if (!msg->findInt32("encoder", &encoder)) {
                    encoder = false;
                }

                MediaCodecList::findMatchingCodecs(
                        mime.c_str(),
                        encoder, // createEncoder
                        0,       // flags
                        &matchingCodecs);
            }

            sp<CodecObserver> observer = new CodecObserver;
            IOMX::node_id node = 0;

            status_t err = NAME_NOT_FOUND;
            for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
                    ++matchIndex) {
                componentName = matchingCodecs[matchIndex];
                quirks = MediaCodecList::getQuirksFor(componentName.c_str());

                // Temporarily boost thread priority so allocation isn't starved.
                pid_t tid = gettid();
                int prevPriority = androidGetThreadPriority(tid);
                androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
                err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node);
                androidSetThreadPriority(tid, prevPriority);

                if (err == OK) {
                    break;
                } else {
                    ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
                }

                node = 0;
            }

            if (node == 0) {
                if (!mime.empty()) {
                    ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                            encoder ? "en" : "de", mime.c_str(), err);
                } else {
                    ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
                }

                mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
                return false;
            }

            mDeathNotifier = new DeathNotifier(notify);
            if (mCodec->mNodeBinder == NULL ||
                    mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) {
                // This was a local binder, if it dies so do we, we won't care
                // about any notifications in the afterlife.
                mDeathNotifier.clear();
            }

            notify = new AMessage(kWhatOMXMessageList, mCodec);
            observer->setNotificationMessage(notify);

            mCodec->mComponentName = componentName;
            mCodec->mRenderTracker.setComponentName(componentName);
            mCodec->mFlags = 0;

            if (componentName.endsWith(".secure")) {
                mCodec->mFlags |= kFlagIsSecure;
                mCodec->mFlags |= kFlagIsGrallocUsageProtected;
                mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
            }

            mCodec->mQuirks = quirks;
            mCodec->mOMX = omx;
            mCodec->mNode = node;

            {
                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatComponentAllocated);
                notify->setString("componentName", mCodec->mComponentName.c_str());
                notify->post();
            }

            mCodec->changeState(mCodec->mLoadedState);

            return true;
        }

        ////////////////////////////////////////////////////////////////////////////////

        ACodec::LoadedState::LoadedState(ACodec *codec)
            : BaseState(codec) {
        }

        // Entering Loaded: reset per-session state; if a shutdown was requested
        // while transitioning here, complete it now.
        void ACodec::LoadedState::stateEntered() {
            ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

            mCodec->mPortEOS[kPortIndexInput] =
                mCodec->mPortEOS[kPortIndexOutput] = false;

            mCodec->mInputEOSResult = OK;

            mCodec->mDequeueCounter = 0;
            mCodec->mMetadataBuffersToSubmit = 0;
            mCodec->mRepeatFrameDelayUs = -1ll;
            mCodec->mInputFormat.clear();
            mCodec->mOutputFormat.clear();
            mCodec->mBaseOutputFormat.clear();

            if (mCodec->mShutdownInProgress) {
                bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

                mCodec->mShutdownInProgress = false;
                mCodec->mKeepComponentAllocated = false;

                onShutdown(keepComponentAllocated);
            }
            mCodec->mExplicitShutdown = false;

            mCodec->processDeferredMessages();
        }

        // Frees the OMX node (unless the component is to be kept for reuse) and
        // acknowledges an explicit shutdown request to the client.
        void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
            if (!keepComponentAllocated) {
                (void)mCodec->mOMX->freeNode(mCodec->mNode);

                mCodec->changeState(mCodec->mUninitializedState);
            }

            if (mCodec->mExplicitShutdown) {
                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
                notify->post();
                mCodec->mExplicitShutdown = false;
            }
        }

        // Message dispatch for the Loaded state: configure, input-surface setup,
        // start, shutdown, and trivial flush acknowledgement.
        bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
            bool handled = false;

            switch (msg->what()) {
                case ACodec::kWhatConfigureComponent:
                {
                    onConfigureComponent(msg);
                    handled = true;
                    break;
                }

                case ACodec::kWhatCreateInputSurface:
                {
                    onCreateInputSurface(msg);
                    handled = true;
                    break;
                }

                case ACodec::kWhatSetInputSurface:
                {
                    onSetInputSurface(msg);
                    handled = true;
                    break;
                }

                case ACodec::kWhatStart:
                {
                    onStart();
                    handled = true;
                    break;
                }

                case ACodec::kWhatShutdown:
                {
                    int32_t keepComponentAllocated;
                    CHECK(msg->findInt32(
                                "keepComponentAllocated", &keepComponentAllocated));

                    mCodec->mExplicitShutdown = true;
                    onShutdown(keepComponentAllocated);

                    handled = true;
                    break;
                }

                case ACodec::kWhatFlush:
                {
                    // Nothing is running yet, so a flush completes immediately.
                    sp<AMessage> notify = mCodec->mNotify->dup();
                    notify->setInt32("what", CodecBase::kWhatFlushCompleted);
                    notify->post();

                    handled = true;
                    break;
                }

                default:
                    return BaseState::onMessageReceived(msg);
            }

            return handled;
        }

        // Configures the allocated component from the client's format message.
        // Returns true on success; on failure signals an error and returns false.
        bool ACodec::LoadedState::onConfigureComponent(
                const sp<AMessage> &msg) {
            ALOGV("onConfigureComponent");

            CHECK(mCodec->mNode != 0);

            status_t err = OK;
            AString mime;
            if (!msg->findString("mime", &mime)) {
                err = BAD_VALUE;
            } else {
                err = mCodec->configureCodec(mime.c_str(), msg);
            }
            if (err != OK) {
                ALOGE("[%s] configureCodec returning error %d",
                      mCodec->mComponentName.c_str(), err);

                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return false;
            }

            {
                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatComponentConfigured);
                notify->setMessage("input-format", mCodec->mInputFormat);
                notify->setMessage("output-format", mCodec->mOutputFormat);
                notify->post();
            }

            return true;
        }

        // Pushes all surface-input options that were captured at configure time
        // (repeat-frame delay, max PTS gap, max fps, time lapse, suspend, color
        // aspects) down to the component. Returns OK or the first failing error.
        status_t ACodec::LoadedState::setupInputSurface() {
            status_t err = OK;

            if (mCodec->mRepeatFrameDelayUs > 0ll) {
                err = mCodec->mOMX->setInternalOption(
                        mCodec->mNode,
                        kPortIndexInput,
                        IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
                        &mCodec->mRepeatFrameDelayUs,
                        sizeof(mCodec->mRepeatFrameDelayUs));

                if (err != OK) {
                    ALOGE("[%s] Unable to configure option to repeat previous "
                          "frames (err %d)",
                          mCodec->mComponentName.c_str(),
                          err);
                    return err;
                }
            }

            if (mCodec->mMaxPtsGapUs > 0ll) {
                err = mCodec->mOMX->setInternalOption(
                        mCodec->mNode,
                        kPortIndexInput,
                        IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
                        &mCodec->mMaxPtsGapUs,
                        sizeof(mCodec->mMaxPtsGapUs));

                if (err != OK) {
                    ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                          mCodec->mComponentName.c_str(),
                          err);
                    return err;
                }
            }

            if (mCodec->mMaxFps > 0) {
                err = mCodec->mOMX->setInternalOption(
                        mCodec->mNode,
                        kPortIndexInput,
                        IOMX::INTERNAL_OPTION_MAX_FPS,
                        &mCodec->mMaxFps,
                        sizeof(mCodec->mMaxFps));

                if (err != OK) {
                    ALOGE("[%s] Unable to configure max fps (err %d)",
                          mCodec->mComponentName.c_str(),
                          err);
                    return err;
                }
            }

            if (mCodec->mTimePerCaptureUs > 0ll
                    && mCodec->mTimePerFrameUs > 0ll) {
                // Time-lapse/slow-motion capture: [0] = per-frame, [1] = per-capture.
                int64_t timeLapse[2];
                timeLapse[0] = mCodec->mTimePerFrameUs;
                timeLapse[1] = mCodec->mTimePerCaptureUs;
                err = mCodec->mOMX->setInternalOption(
                        mCodec->mNode,
                        kPortIndexInput,
                        IOMX::INTERNAL_OPTION_TIME_LAPSE,
                        &timeLapse[0],
                        sizeof(timeLapse));

                if (err != OK) {
                    ALOGE("[%s] Unable to configure time lapse (err %d)",
                          mCodec->mComponentName.c_str(),
                          err);
                    return err;
                }
            }

            if (mCodec->mCreateInputBuffersSuspended) {
                bool suspend = true;
                err = mCodec->mOMX->setInternalOption(
                        mCodec->mNode,
                        kPortIndexInput,
                        IOMX::INTERNAL_OPTION_SUSPEND,
                        &suspend,
                        sizeof(suspend));

                if (err != OK) {
                    ALOGE("[%s] Unable to configure option to suspend (err %d)",
                          mCodec->mComponentName.c_str(),
                          err);
                    return err;
                }
            }

            // Advertise the consumer's usage bits (best-effort; failure ignored).
            uint32_t usageBits;
            if (mCodec->mOMX->getParameter(
                    mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                    &usageBits, sizeof(usageBits)) == OK) {
                mCodec->mInputFormat->setInt32(
                        "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
            }

            sp<ABuffer> colorAspectsBuffer;
            if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
                err
 = mCodec->mOMX->setInternalOption(
                        mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
                        colorAspectsBuffer->base(), colorAspectsBuffer->capacity());
                if (err != OK) {
                    ALOGE("[%s] Unable to configure color aspects (err %d)",
                          mCodec->mComponentName.c_str(), err);
                    return err;
                }
            }
            return OK;
        }

        // Creates a new encoder input surface on the component and reports the
        // resulting IGraphicBufferProducer (or an error) via kWhatInputSurfaceCreated.
        void ACodec::LoadedState::onCreateInputSurface(
                const sp<AMessage> & /* msg */) {
            ALOGV("onCreateInputSurface");

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

            android_dataspace dataSpace;
            status_t err =
                mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
            notify->setMessage("input-format", mCodec->mInputFormat);
            notify->setMessage("output-format", mCodec->mOutputFormat);

            sp<IGraphicBufferProducer> bufferProducer;
            if (err == OK) {
                mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
                err = mCodec->mOMX->createInputSurface(
                        mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer,
                        &mCodec->mInputMetadataType);
                // framework uses ANW buffers internally instead of gralloc handles
                if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
                    mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
                }
            }

            if (err == OK) {
                err = setupInputSurface();
            }

            if (err == OK) {
                notify->setObject("input-surface",
                        new BufferProducerWrapper(bufferProducer));
            } else {
                // Can't use mCodec->signalError() here -- MediaCodec won't forward
                // the error through because it's in the "configured" state. We
                // send a kWhatInputSurfaceCreated with an error value instead.
                ALOGE("[%s] onCreateInputSurface returning error %d",
                        mCodec->mComponentName.c_str(), err);
                notify->setInt32("err", err);
            }
            notify->post();
        }

        // Attaches a pre-existing persistent input surface to the component and
        // reports success or failure via kWhatInputSurfaceAccepted.
        void ACodec::LoadedState::onSetInputSurface(
                const sp<AMessage> &msg) {
            ALOGV("onSetInputSurface");

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

            sp<RefBase> obj;
            CHECK(msg->findObject("input-surface", &obj));
            sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());

            android_dataspace dataSpace;
            status_t err =
                mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
            notify->setMessage("input-format", mCodec->mInputFormat);
            notify->setMessage("output-format", mCodec->mOutputFormat);

            if (err == OK) {
                mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
                err = mCodec->mOMX->setInputSurface(
                        mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
                        &mCodec->mInputMetadataType);
                // framework uses ANW buffers internally instead of gralloc handles
                if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
                    mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
                }
            }

            if (err == OK) {
                surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
                err = setupInputSurface();
            }

            if (err != OK) {
                // Can't use mCodec->signalError() here -- MediaCodec won't forward
                // the error through because it's in the "configured" state. We
                // send a kWhatInputSurfaceAccepted with an error value instead.
                ALOGE("[%s] onSetInputSurface returning error %d",
                        mCodec->mComponentName.c_str(), err);
                notify->setInt32("err", err);
            }
            notify->post();
        }

        // Kicks off the Loaded -> Idle OMX state transition.
        void ACodec::LoadedState::onStart() {
            ALOGV("onStart");

            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mLoadedToIdleState);
            }
        }

        ////////////////////////////////////////////////////////////////////////////////

        ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
            : BaseState(codec) {
        }

        // Entering Loaded->Idle: allocate buffers on both ports, as required by
        // the OMX Idle transition. On failure, roll back to Loaded.
        void ACodec::LoadedToIdleState::stateEntered() {
            ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

            status_t err;
            if ((err = allocateBuffers()) != OK) {
                ALOGE("Failed to allocate buffers after transitioning to IDLE state "
                     "(error 0x%08x)",
                     err);

                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
                if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
                    mCodec->freeBuffersOnPort(kPortIndexInput);
                }
                if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
                    mCodec->freeBuffersOnPort(kPortIndexOutput);
                }

                mCodec->changeState(mCodec->mLoadedState);
            }
        }

        // Allocates input-port buffers, then output-port buffers.
        status_t ACodec::LoadedToIdleState::allocateBuffers() {
            status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

            if (err != OK) {
                return err;
            }

            return mCodec->allocateBuffersOnPort(kPortIndexOutput);
        }

        // Transitional state: defer set-parameters/shutdown until a stable state;
        // flush and resume are trivially handled.
        bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
            switch (msg->what()) {
                case kWhatSetParameters:
                case kWhatShutdown:
                {
                    mCodec->deferMessage(msg);
                    return true;
                }

                case kWhatSignalEndOfInputStream:
                {
                    mCodec->onSignalEndOfInputStream();
                    return true;
                }

                case kWhatResume:
                {
                    // We'll be active soon enough.
                    return true;
                }

                case kWhatFlush:
                {
                    // We haven't even started yet, so we're flushed alright...
                    sp<AMessage> notify = mCodec->mNotify->dup();
                    notify->setInt32("what", CodecBase::kWhatFlushCompleted);
                    notify->post();
                    return true;
                }

                default:
                    return BaseState::onMessageReceived(msg);
            }
        }

        // On Idle completion, immediately request the Executing transition.
        bool ACodec::LoadedToIdleState::onOMXEvent(
                OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
            switch (event) {
                case OMX_EventCmdComplete:
                {
                    status_t err = OK;
                    if (data1 != (OMX_U32)OMX_CommandStateSet
                            || data2 != (OMX_U32)OMX_StateIdle) {
                        ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                                asString((OMX_COMMANDTYPE)data1), data1,
                                asString((OMX_STATETYPE)data2), data2);
                        err = FAILED_TRANSACTION;
                    }

                    if (err == OK) {
                        err = mCodec->mOMX->sendCommand(
                                mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
                    }

                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    } else {
                        mCodec->changeState(mCodec->mIdleToExecutingState);
                    }

                    return true;
                }

                default:
                    return BaseState::onOMXEvent(event, data1, data2);
            }
        }

        ////////////////////////////////////////////////////////////////////////////////

        ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
            : BaseState(codec) {
        }

        void ACodec::IdleToExecutingState::stateEntered() {
            ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
        }

        // Transitional state: same deferral policy as LoadedToIdleState.
        bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
            switch (msg->what()) {
                case kWhatSetParameters:
                case kWhatShutdown:
                {
                    mCodec->deferMessage(msg);
                    return true;
                }

                case kWhatResume:
                {
                    // We'll be active soon enough.
                    return true;
                }

                case kWhatFlush:
                {
                    // We haven't even started yet, so we're flushed alright...
                    sp<AMessage> notify = mCodec->mNotify->dup();
                    notify->setInt32("what", CodecBase::kWhatFlushCompleted);
                    notify->post();

                    return true;
                }

                case kWhatSignalEndOfInputStream:
                {
                    mCodec->onSignalEndOfInputStream();
                    return true;
                }

                default:
                    return BaseState::onMessageReceived(msg);
            }
        }

        // On Executing completion, resume buffer flow and enter ExecutingState.
        bool ACodec::IdleToExecutingState::onOMXEvent(
                OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
            switch (event) {
                case OMX_EventCmdComplete:
                {
                    if (data1 != (OMX_U32)OMX_CommandStateSet
                            || data2 != (OMX_U32)OMX_StateExecuting) {
                        ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                                asString((OMX_COMMANDTYPE)data1), data1,
                                asString((OMX_STATETYPE)data2), data2);
                        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                        return true;
                    }

                    mCodec->mExecutingState->resume();
                    mCodec->changeState(mCodec->mExecutingState);

                    return true;
                }

                default:
                    return BaseState::onOMXEvent(event, data1, data2);
            }
        }

        ////////////////////////////////////////////////////////////////////////////////

        ACodec::ExecutingState::ExecutingState(ACodec *codec)
            : BaseState(codec),
              mActive(false) {
        }

        // While executing, all returned buffers are resubmitted on both ports.
        ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
                OMX_U32 /* portIndex */) {
            return RESUBMIT_BUFFERS;
        }

        void ACodec::ExecutingState::submitOutputMetaBuffers() {
            // submit as many buffers as there are input buffers with the codec
            // in case we are in port reconfiguring
            for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
                BufferInfo *info =
&mCodec->mBuffers[kPortIndexInput].editItemAt(i); 7252 7253 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 7254 if (mCodec->submitOutputMetadataBuffer() != OK) 7255 break; 7256 } 7257 } 7258 7259 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 7260 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 7261} 7262 7263void ACodec::ExecutingState::submitRegularOutputBuffers() { 7264 bool failed = false; 7265 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 7266 BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i); 7267 7268 if (mCodec->mNativeWindow != NULL) { 7269 if (info->mStatus != BufferInfo::OWNED_BY_US 7270 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 7271 ALOGE("buffers should be owned by us or the surface"); 7272 failed = true; 7273 break; 7274 } 7275 7276 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 7277 continue; 7278 } 7279 } else { 7280 if (info->mStatus != BufferInfo::OWNED_BY_US) { 7281 ALOGE("buffers should be owned by us"); 7282 failed = true; 7283 break; 7284 } 7285 } 7286 7287 ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID); 7288 7289 info->checkWriteFence("submitRegularOutputBuffers"); 7290 status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 7291 info->mFenceFd = -1; 7292 if (err != OK) { 7293 failed = true; 7294 break; 7295 } 7296 7297 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 7298 } 7299 7300 if (failed) { 7301 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7302 } 7303} 7304 7305void ACodec::ExecutingState::submitOutputBuffers() { 7306 submitRegularOutputBuffers(); 7307 if (mCodec->storingMetadataInDecodedBuffers()) { 7308 submitOutputMetaBuffers(); 7309 } 7310} 7311 7312void ACodec::ExecutingState::resume() { 7313 if (mActive) { 7314 ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str()); 7315 return; 7316 } 7317 7318 
submitOutputBuffers(); 7319 7320 // Post all available input buffers 7321 if (mCodec->mBuffers[kPortIndexInput].size() == 0u) { 7322 ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str()); 7323 } 7324 7325 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) { 7326 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 7327 if (info->mStatus == BufferInfo::OWNED_BY_US) { 7328 postFillThisBuffer(info); 7329 } 7330 } 7331 7332 mActive = true; 7333} 7334 7335void ACodec::ExecutingState::stateEntered() { 7336 ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); 7337 7338 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7339 mCodec->processDeferredMessages(); 7340} 7341 7342bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { 7343 bool handled = false; 7344 7345 switch (msg->what()) { 7346 case kWhatShutdown: 7347 { 7348 int32_t keepComponentAllocated; 7349 CHECK(msg->findInt32( 7350 "keepComponentAllocated", &keepComponentAllocated)); 7351 7352 mCodec->mShutdownInProgress = true; 7353 mCodec->mExplicitShutdown = true; 7354 mCodec->mKeepComponentAllocated = keepComponentAllocated; 7355 7356 mActive = false; 7357 7358 status_t err = mCodec->mOMX->sendCommand( 7359 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 7360 if (err != OK) { 7361 if (keepComponentAllocated) { 7362 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7363 } 7364 // TODO: do some recovery here. 
7365 } else { 7366 mCodec->changeState(mCodec->mExecutingToIdleState); 7367 } 7368 7369 handled = true; 7370 break; 7371 } 7372 7373 case kWhatFlush: 7374 { 7375 ALOGV("[%s] ExecutingState flushing now " 7376 "(codec owns %zu/%zu input, %zu/%zu output).", 7377 mCodec->mComponentName.c_str(), 7378 mCodec->countBuffersOwnedByComponent(kPortIndexInput), 7379 mCodec->mBuffers[kPortIndexInput].size(), 7380 mCodec->countBuffersOwnedByComponent(kPortIndexOutput), 7381 mCodec->mBuffers[kPortIndexOutput].size()); 7382 7383 mActive = false; 7384 7385 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL); 7386 if (err != OK) { 7387 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7388 } else { 7389 mCodec->changeState(mCodec->mFlushingState); 7390 } 7391 7392 handled = true; 7393 break; 7394 } 7395 7396 case kWhatResume: 7397 { 7398 resume(); 7399 7400 handled = true; 7401 break; 7402 } 7403 7404 case kWhatRequestIDRFrame: 7405 { 7406 status_t err = mCodec->requestIDRFrame(); 7407 if (err != OK) { 7408 ALOGW("Requesting an IDR frame failed."); 7409 } 7410 7411 handled = true; 7412 break; 7413 } 7414 7415 case kWhatSetParameters: 7416 { 7417 sp<AMessage> params; 7418 CHECK(msg->findMessage("params", ¶ms)); 7419 7420 status_t err = mCodec->setParameters(params); 7421 7422 sp<AMessage> reply; 7423 if (msg->findMessage("reply", &reply)) { 7424 reply->setInt32("err", err); 7425 reply->post(); 7426 } 7427 7428 handled = true; 7429 break; 7430 } 7431 7432 case ACodec::kWhatSignalEndOfInputStream: 7433 { 7434 mCodec->onSignalEndOfInputStream(); 7435 handled = true; 7436 break; 7437 } 7438 7439 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 7440 case kWhatSubmitOutputMetadataBufferIfEOS: 7441 { 7442 if (mCodec->mPortEOS[kPortIndexInput] && 7443 !mCodec->mPortEOS[kPortIndexOutput]) { 7444 status_t err = mCodec->submitOutputMetadataBuffer(); 7445 if (err == OK) { 7446 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 7447 } 
7448 } 7449 return true; 7450 } 7451 7452 default: 7453 handled = BaseState::onMessageReceived(msg); 7454 break; 7455 } 7456 7457 return handled; 7458} 7459 7460status_t ACodec::setParameters(const sp<AMessage> ¶ms) { 7461 int32_t videoBitrate; 7462 if (params->findInt32("video-bitrate", &videoBitrate)) { 7463 OMX_VIDEO_CONFIG_BITRATETYPE configParams; 7464 InitOMXParams(&configParams); 7465 configParams.nPortIndex = kPortIndexOutput; 7466 configParams.nEncodeBitrate = videoBitrate; 7467 7468 status_t err = mOMX->setConfig( 7469 mNode, 7470 OMX_IndexConfigVideoBitrate, 7471 &configParams, 7472 sizeof(configParams)); 7473 7474 if (err != OK) { 7475 ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d", 7476 videoBitrate, err); 7477 7478 return err; 7479 } 7480 } 7481 7482 int64_t skipFramesBeforeUs; 7483 if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) { 7484 status_t err = 7485 mOMX->setInternalOption( 7486 mNode, 7487 kPortIndexInput, 7488 IOMX::INTERNAL_OPTION_START_TIME, 7489 &skipFramesBeforeUs, 7490 sizeof(skipFramesBeforeUs)); 7491 7492 if (err != OK) { 7493 ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err); 7494 return err; 7495 } 7496 } 7497 7498 int32_t dropInputFrames; 7499 if (params->findInt32("drop-input-frames", &dropInputFrames)) { 7500 bool suspend = dropInputFrames != 0; 7501 7502 status_t err = 7503 mOMX->setInternalOption( 7504 mNode, 7505 kPortIndexInput, 7506 IOMX::INTERNAL_OPTION_SUSPEND, 7507 &suspend, 7508 sizeof(suspend)); 7509 7510 if (err != OK) { 7511 ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err); 7512 return err; 7513 } 7514 } 7515 7516 int32_t dummy; 7517 if (params->findInt32("request-sync", &dummy)) { 7518 status_t err = requestIDRFrame(); 7519 7520 if (err != OK) { 7521 ALOGE("Requesting a sync frame failed w/ err %d", err); 7522 return err; 7523 } 7524 } 7525 7526 float rate; 7527 if (params->findFloat("operating-rate", &rate) && rate > 0) { 7528 status_t err = 
setOperatingRate(rate, mIsVideo); 7529 if (err != OK) { 7530 ALOGE("Failed to set parameter 'operating-rate' (err %d)", err); 7531 return err; 7532 } 7533 } 7534 7535 int32_t intraRefreshPeriod = 0; 7536 if (params->findInt32("intra-refresh-period", &intraRefreshPeriod) 7537 && intraRefreshPeriod > 0) { 7538 status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false); 7539 if (err != OK) { 7540 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 7541 mComponentName.c_str()); 7542 err = OK; 7543 } 7544 } 7545 7546 status_t err = configureTemporalLayers(params, false /* inConfigure */, mOutputFormat); 7547 if (err != OK) { 7548 err = OK; // ignore failure 7549 } 7550 7551 return err; 7552} 7553 7554void ACodec::onSignalEndOfInputStream() { 7555 sp<AMessage> notify = mNotify->dup(); 7556 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); 7557 7558 status_t err = mOMX->signalEndOfInputStream(mNode); 7559 if (err != OK) { 7560 notify->setInt32("err", err); 7561 } 7562 notify->post(); 7563} 7564 7565bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 7566 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7567 return true; 7568} 7569 7570bool ACodec::ExecutingState::onOMXEvent( 7571 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7572 switch (event) { 7573 case OMX_EventPortSettingsChanged: 7574 { 7575 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 7576 7577 mCodec->onOutputFormatChanged(); 7578 7579 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { 7580 mCodec->mMetadataBuffersToSubmit = 0; 7581 CHECK_EQ(mCodec->mOMX->sendCommand( 7582 mCodec->mNode, 7583 OMX_CommandPortDisable, kPortIndexOutput), 7584 (status_t)OK); 7585 7586 mCodec->freeOutputBuffersNotOwnedByComponent(); 7587 7588 mCodec->changeState(mCodec->mOutputPortSettingsChangedState); 7589 } else if (data2 != OMX_IndexConfigCommonOutputCrop 7590 && data2 != OMX_IndexConfigAndroidIntraRefresh) { 7591 ALOGV("[%s] 
OMX_EventPortSettingsChanged 0x%08x", 7592 mCodec->mComponentName.c_str(), data2); 7593 } 7594 7595 return true; 7596 } 7597 7598 case OMX_EventBufferFlag: 7599 { 7600 return true; 7601 } 7602 7603 default: 7604 return BaseState::onOMXEvent(event, data1, data2); 7605 } 7606} 7607 7608//////////////////////////////////////////////////////////////////////////////// 7609 7610ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState( 7611 ACodec *codec) 7612 : BaseState(codec) { 7613} 7614 7615ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode( 7616 OMX_U32 portIndex) { 7617 if (portIndex == kPortIndexOutput) { 7618 return FREE_BUFFERS; 7619 } 7620 7621 CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput); 7622 7623 return RESUBMIT_BUFFERS; 7624} 7625 7626bool ACodec::OutputPortSettingsChangedState::onMessageReceived( 7627 const sp<AMessage> &msg) { 7628 bool handled = false; 7629 7630 switch (msg->what()) { 7631 case kWhatFlush: 7632 case kWhatShutdown: 7633 case kWhatResume: 7634 case kWhatSetParameters: 7635 { 7636 if (msg->what() == kWhatResume) { 7637 ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str()); 7638 } 7639 7640 mCodec->deferMessage(msg); 7641 handled = true; 7642 break; 7643 } 7644 7645 default: 7646 handled = BaseState::onMessageReceived(msg); 7647 break; 7648 } 7649 7650 return handled; 7651} 7652 7653void ACodec::OutputPortSettingsChangedState::stateEntered() { 7654 ALOGV("[%s] Now handling output port settings change", 7655 mCodec->mComponentName.c_str()); 7656} 7657 7658bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered( 7659 int64_t mediaTimeUs, nsecs_t systemNano) { 7660 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7661 return true; 7662} 7663 7664bool ACodec::OutputPortSettingsChangedState::onOMXEvent( 7665 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7666 switch (event) { 7667 case OMX_EventCmdComplete: 7668 { 7669 if (data1 == (OMX_U32)OMX_CommandPortDisable) { 7670 if 
(data2 != (OMX_U32)kPortIndexOutput) { 7671 ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2); 7672 return false; 7673 } 7674 7675 ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str()); 7676 7677 status_t err = OK; 7678 if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) { 7679 ALOGE("disabled port should be empty, but has %zu buffers", 7680 mCodec->mBuffers[kPortIndexOutput].size()); 7681 err = FAILED_TRANSACTION; 7682 } else { 7683 mCodec->mDealer[kPortIndexOutput].clear(); 7684 } 7685 7686 if (err == OK) { 7687 err = mCodec->mOMX->sendCommand( 7688 mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput); 7689 } 7690 7691 if (err == OK) { 7692 err = mCodec->allocateBuffersOnPort(kPortIndexOutput); 7693 ALOGE_IF(err != OK, "Failed to allocate output port buffers after port " 7694 "reconfiguration: (%d)", err); 7695 } 7696 7697 if (err != OK) { 7698 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 7699 7700 // This is technically not correct, but appears to be 7701 // the only way to free the component instance. 7702 // Controlled transitioning from excecuting->idle 7703 // and idle->loaded seem impossible probably because 7704 // the output port never finishes re-enabling. 
7705 mCodec->mShutdownInProgress = true; 7706 mCodec->mKeepComponentAllocated = false; 7707 mCodec->changeState(mCodec->mLoadedState); 7708 } 7709 7710 return true; 7711 } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { 7712 if (data2 != (OMX_U32)kPortIndexOutput) { 7713 ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2); 7714 return false; 7715 } 7716 7717 ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str()); 7718 7719 if (mCodec->mExecutingState->active()) { 7720 mCodec->mExecutingState->submitOutputBuffers(); 7721 } 7722 7723 mCodec->changeState(mCodec->mExecutingState); 7724 7725 return true; 7726 } 7727 7728 return false; 7729 } 7730 7731 default: 7732 return false; 7733 } 7734} 7735 7736//////////////////////////////////////////////////////////////////////////////// 7737 7738ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec) 7739 : BaseState(codec), 7740 mComponentNowIdle(false) { 7741} 7742 7743bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) { 7744 bool handled = false; 7745 7746 switch (msg->what()) { 7747 case kWhatFlush: 7748 { 7749 // Don't send me a flush request if you previously wanted me 7750 // to shutdown. 7751 ALOGW("Ignoring flush request in ExecutingToIdleState"); 7752 break; 7753 } 7754 7755 case kWhatShutdown: 7756 { 7757 // We're already doing that... 
7758 7759 handled = true; 7760 break; 7761 } 7762 7763 default: 7764 handled = BaseState::onMessageReceived(msg); 7765 break; 7766 } 7767 7768 return handled; 7769} 7770 7771void ACodec::ExecutingToIdleState::stateEntered() { 7772 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); 7773 7774 mComponentNowIdle = false; 7775 mCodec->mLastOutputFormat.clear(); 7776} 7777 7778bool ACodec::ExecutingToIdleState::onOMXEvent( 7779 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7780 switch (event) { 7781 case OMX_EventCmdComplete: 7782 { 7783 if (data1 != (OMX_U32)OMX_CommandStateSet 7784 || data2 != (OMX_U32)OMX_StateIdle) { 7785 ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)", 7786 asString((OMX_COMMANDTYPE)data1), data1, 7787 asString((OMX_STATETYPE)data2), data2); 7788 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7789 return true; 7790 } 7791 7792 mComponentNowIdle = true; 7793 7794 changeStateIfWeOwnAllBuffers(); 7795 7796 return true; 7797 } 7798 7799 case OMX_EventPortSettingsChanged: 7800 case OMX_EventBufferFlag: 7801 { 7802 // We're shutting down and don't care about this anymore. 7803 return true; 7804 } 7805 7806 default: 7807 return BaseState::onOMXEvent(event, data1, data2); 7808 } 7809} 7810 7811void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { 7812 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { 7813 status_t err = mCodec->mOMX->sendCommand( 7814 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 7815 if (err == OK) { 7816 err = mCodec->freeBuffersOnPort(kPortIndexInput); 7817 status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput); 7818 if (err == OK) { 7819 err = err2; 7820 } 7821 } 7822 7823 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) 7824 && mCodec->mNativeWindow != NULL) { 7825 // We push enough 1x1 blank buffers to ensure that one of 7826 // them has made it to the display. 
This allows the OMX 7827 // component teardown to zero out any protected buffers 7828 // without the risk of scanning out one of those buffers. 7829 pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get()); 7830 } 7831 7832 if (err != OK) { 7833 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7834 return; 7835 } 7836 7837 mCodec->changeState(mCodec->mIdleToLoadedState); 7838 } 7839} 7840 7841void ACodec::ExecutingToIdleState::onInputBufferFilled( 7842 const sp<AMessage> &msg) { 7843 BaseState::onInputBufferFilled(msg); 7844 7845 changeStateIfWeOwnAllBuffers(); 7846} 7847 7848void ACodec::ExecutingToIdleState::onOutputBufferDrained( 7849 const sp<AMessage> &msg) { 7850 BaseState::onOutputBufferDrained(msg); 7851 7852 changeStateIfWeOwnAllBuffers(); 7853} 7854 7855//////////////////////////////////////////////////////////////////////////////// 7856 7857ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) 7858 : BaseState(codec) { 7859} 7860 7861bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { 7862 bool handled = false; 7863 7864 switch (msg->what()) { 7865 case kWhatShutdown: 7866 { 7867 // We're already doing that... 7868 7869 handled = true; 7870 break; 7871 } 7872 7873 case kWhatFlush: 7874 { 7875 // Don't send me a flush request if you previously wanted me 7876 // to shutdown. 
7877 ALOGE("Got flush request in IdleToLoadedState"); 7878 break; 7879 } 7880 7881 default: 7882 handled = BaseState::onMessageReceived(msg); 7883 break; 7884 } 7885 7886 return handled; 7887} 7888 7889void ACodec::IdleToLoadedState::stateEntered() { 7890 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7891} 7892 7893bool ACodec::IdleToLoadedState::onOMXEvent( 7894 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7895 switch (event) { 7896 case OMX_EventCmdComplete: 7897 { 7898 if (data1 != (OMX_U32)OMX_CommandStateSet 7899 || data2 != (OMX_U32)OMX_StateLoaded) { 7900 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7901 asString((OMX_COMMANDTYPE)data1), data1, 7902 asString((OMX_STATETYPE)data2), data2); 7903 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7904 return true; 7905 } 7906 7907 mCodec->changeState(mCodec->mLoadedState); 7908 7909 return true; 7910 } 7911 7912 default: 7913 return BaseState::onOMXEvent(event, data1, data2); 7914 } 7915} 7916 7917//////////////////////////////////////////////////////////////////////////////// 7918 7919ACodec::FlushingState::FlushingState(ACodec *codec) 7920 : BaseState(codec) { 7921} 7922 7923void ACodec::FlushingState::stateEntered() { 7924 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7925 7926 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7927} 7928 7929bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7930 bool handled = false; 7931 7932 switch (msg->what()) { 7933 case kWhatShutdown: 7934 { 7935 mCodec->deferMessage(msg); 7936 break; 7937 } 7938 7939 case kWhatFlush: 7940 { 7941 // We're already doing this right now. 
7942 handled = true; 7943 break; 7944 } 7945 7946 default: 7947 handled = BaseState::onMessageReceived(msg); 7948 break; 7949 } 7950 7951 return handled; 7952} 7953 7954bool ACodec::FlushingState::onOMXEvent( 7955 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7956 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7957 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7958 7959 switch (event) { 7960 case OMX_EventCmdComplete: 7961 { 7962 if (data1 != (OMX_U32)OMX_CommandFlush) { 7963 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7964 asString((OMX_COMMANDTYPE)data1), data1, data2); 7965 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7966 return true; 7967 } 7968 7969 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7970 if (mFlushComplete[data2]) { 7971 ALOGW("Flush already completed for %s port", 7972 data2 == kPortIndexInput ? "input" : "output"); 7973 return true; 7974 } 7975 mFlushComplete[data2] = true; 7976 7977 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7978 changeStateIfWeOwnAllBuffers(); 7979 } 7980 } else if (data2 == OMX_ALL) { 7981 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7982 ALOGW("received flush complete event for OMX_ALL before ports have been" 7983 "flushed (%d/%d)", 7984 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7985 return false; 7986 } 7987 7988 changeStateIfWeOwnAllBuffers(); 7989 } else { 7990 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7991 } 7992 7993 return true; 7994 } 7995 7996 case OMX_EventPortSettingsChanged: 7997 { 7998 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7999 msg->setInt32("type", omx_message::EVENT); 8000 msg->setInt32("node", mCodec->mNode); 8001 msg->setInt32("event", event); 8002 msg->setInt32("data1", data1); 8003 msg->setInt32("data2", data2); 8004 8005 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 8006 
mCodec->mComponentName.c_str()); 8007 8008 mCodec->deferMessage(msg); 8009 8010 return true; 8011 } 8012 8013 default: 8014 return BaseState::onOMXEvent(event, data1, data2); 8015 } 8016 8017 return true; 8018} 8019 8020void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 8021 BaseState::onOutputBufferDrained(msg); 8022 8023 changeStateIfWeOwnAllBuffers(); 8024} 8025 8026void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 8027 BaseState::onInputBufferFilled(msg); 8028 8029 changeStateIfWeOwnAllBuffers(); 8030} 8031 8032void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 8033 if (mFlushComplete[kPortIndexInput] 8034 && mFlushComplete[kPortIndexOutput] 8035 && mCodec->allYourBuffersAreBelongToUs()) { 8036 // We now own all buffers except possibly those still queued with 8037 // the native window for rendering. Let's get those back as well. 8038 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 8039 8040 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 8041 8042 sp<AMessage> notify = mCodec->mNotify->dup(); 8043 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 8044 notify->post(); 8045 8046 mCodec->mPortEOS[kPortIndexInput] = 8047 mCodec->mPortEOS[kPortIndexOutput] = false; 8048 8049 mCodec->mInputEOSResult = OK; 8050 8051 if (mCodec->mSkipCutBuffer != NULL) { 8052 mCodec->mSkipCutBuffer->clear(); 8053 } 8054 8055 mCodec->changeState(mCodec->mExecutingState); 8056 } 8057} 8058 8059status_t ACodec::queryCapabilities( 8060 const AString &name, const AString &mime, bool isEncoder, 8061 sp<MediaCodecInfo::Capabilities> *caps) { 8062 (*caps).clear(); 8063 const char *role = getComponentRole(isEncoder, mime.c_str()); 8064 if (role == NULL) { 8065 return BAD_VALUE; 8066 } 8067 8068 OMXClient client; 8069 status_t err = client.connect(); 8070 if (err != OK) { 8071 return err; 8072 } 8073 8074 sp<IOMX> omx = client.interface(); 8075 sp<CodecObserver> observer = new CodecObserver; 8076 
IOMX::node_id node = 0; 8077 8078 err = omx->allocateNode(name.c_str(), observer, NULL, &node); 8079 if (err != OK) { 8080 client.disconnect(); 8081 return err; 8082 } 8083 8084 err = setComponentRole(omx, node, role); 8085 if (err != OK) { 8086 omx->freeNode(node); 8087 client.disconnect(); 8088 return err; 8089 } 8090 8091 sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder(); 8092 bool isVideo = mime.startsWithIgnoreCase("video/"); 8093 8094 if (isVideo) { 8095 OMX_VIDEO_PARAM_PROFILELEVELTYPE param; 8096 InitOMXParams(¶m); 8097 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 8098 8099 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 8100 param.nProfileIndex = index; 8101 status_t err = omx->getParameter( 8102 node, OMX_IndexParamVideoProfileLevelQuerySupported, 8103 ¶m, sizeof(param)); 8104 if (err != OK) { 8105 break; 8106 } 8107 builder->addProfileLevel(param.eProfile, param.eLevel); 8108 8109 if (index == kMaxIndicesToCheck) { 8110 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 8111 name.c_str(), index, 8112 param.eProfile, param.eLevel); 8113 } 8114 } 8115 8116 // Color format query 8117 // return colors in the order reported by the OMX component 8118 // prefix "flexible" standard ones with the flexible equivalent 8119 OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat; 8120 InitOMXParams(&portFormat); 8121 portFormat.nPortIndex = isEncoder ? 
kPortIndexInput : kPortIndexOutput; 8122 Vector<uint32_t> supportedColors; // shadow copy to check for duplicates 8123 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 8124 portFormat.nIndex = index; 8125 status_t err = omx->getParameter( 8126 node, OMX_IndexParamVideoPortFormat, 8127 &portFormat, sizeof(portFormat)); 8128 if (err != OK) { 8129 break; 8130 } 8131 8132 OMX_U32 flexibleEquivalent; 8133 if (isFlexibleColorFormat( 8134 omx, node, portFormat.eColorFormat, false /* usingNativeWindow */, 8135 &flexibleEquivalent)) { 8136 bool marked = false; 8137 for (size_t i = 0; i < supportedColors.size(); ++i) { 8138 if (supportedColors[i] == flexibleEquivalent) { 8139 marked = true; 8140 break; 8141 } 8142 } 8143 if (!marked) { 8144 supportedColors.push(flexibleEquivalent); 8145 builder->addColorFormat(flexibleEquivalent); 8146 } 8147 } 8148 supportedColors.push(portFormat.eColorFormat); 8149 builder->addColorFormat(portFormat.eColorFormat); 8150 8151 if (index == kMaxIndicesToCheck) { 8152 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 8153 name.c_str(), index, 8154 asString(portFormat.eColorFormat), portFormat.eColorFormat); 8155 } 8156 } 8157 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) { 8158 // More audio codecs if they have profiles. 8159 OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param; 8160 InitOMXParams(¶m); 8161 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 8162 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 8163 param.nProfileIndex = index; 8164 status_t err = omx->getParameter( 8165 node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported, 8166 ¶m, sizeof(param)); 8167 if (err != OK) { 8168 break; 8169 } 8170 // For audio, level is ignored. 
8171 builder->addProfileLevel(param.eProfile, 0 /* level */); 8172 8173 if (index == kMaxIndicesToCheck) { 8174 ALOGW("[%s] stopping checking profiles after %u: %x", 8175 name.c_str(), index, 8176 param.eProfile); 8177 } 8178 } 8179 8180 // NOTE: Without Android extensions, OMX does not provide a way to query 8181 // AAC profile support 8182 if (param.nProfileIndex == 0) { 8183 ALOGW("component %s doesn't support profile query.", name.c_str()); 8184 } 8185 } 8186 8187 if (isVideo && !isEncoder) { 8188 native_handle_t *sidebandHandle = NULL; 8189 if (omx->configureVideoTunnelMode( 8190 node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) { 8191 // tunneled playback includes adaptive playback 8192 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback 8193 | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback); 8194 } else if (omx->storeMetaDataInBuffers( 8195 node, kPortIndexOutput, OMX_TRUE) == OK || 8196 omx->prepareForAdaptivePlayback( 8197 node, kPortIndexOutput, OMX_TRUE, 8198 1280 /* width */, 720 /* height */) == OK) { 8199 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback); 8200 } 8201 } 8202 8203 if (isVideo && isEncoder) { 8204 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 8205 InitOMXParams(¶ms); 8206 params.nPortIndex = kPortIndexOutput; 8207 // TODO: should we verify if fallback is supported? 8208 if (omx->getConfig( 8209 node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, 8210 ¶ms, sizeof(params)) == OK) { 8211 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh); 8212 } 8213 } 8214 8215 *caps = builder; 8216 omx->freeNode(node); 8217 client.disconnect(); 8218 return OK; 8219} 8220 8221// These are supposed be equivalent to the logic in 8222// "audio_channel_out_mask_from_count". 
8223//static 8224status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 8225 switch (numChannels) { 8226 case 1: 8227 map[0] = OMX_AUDIO_ChannelCF; 8228 break; 8229 case 2: 8230 map[0] = OMX_AUDIO_ChannelLF; 8231 map[1] = OMX_AUDIO_ChannelRF; 8232 break; 8233 case 3: 8234 map[0] = OMX_AUDIO_ChannelLF; 8235 map[1] = OMX_AUDIO_ChannelRF; 8236 map[2] = OMX_AUDIO_ChannelCF; 8237 break; 8238 case 4: 8239 map[0] = OMX_AUDIO_ChannelLF; 8240 map[1] = OMX_AUDIO_ChannelRF; 8241 map[2] = OMX_AUDIO_ChannelLR; 8242 map[3] = OMX_AUDIO_ChannelRR; 8243 break; 8244 case 5: 8245 map[0] = OMX_AUDIO_ChannelLF; 8246 map[1] = OMX_AUDIO_ChannelRF; 8247 map[2] = OMX_AUDIO_ChannelCF; 8248 map[3] = OMX_AUDIO_ChannelLR; 8249 map[4] = OMX_AUDIO_ChannelRR; 8250 break; 8251 case 6: 8252 map[0] = OMX_AUDIO_ChannelLF; 8253 map[1] = OMX_AUDIO_ChannelRF; 8254 map[2] = OMX_AUDIO_ChannelCF; 8255 map[3] = OMX_AUDIO_ChannelLFE; 8256 map[4] = OMX_AUDIO_ChannelLR; 8257 map[5] = OMX_AUDIO_ChannelRR; 8258 break; 8259 case 7: 8260 map[0] = OMX_AUDIO_ChannelLF; 8261 map[1] = OMX_AUDIO_ChannelRF; 8262 map[2] = OMX_AUDIO_ChannelCF; 8263 map[3] = OMX_AUDIO_ChannelLFE; 8264 map[4] = OMX_AUDIO_ChannelLR; 8265 map[5] = OMX_AUDIO_ChannelRR; 8266 map[6] = OMX_AUDIO_ChannelCS; 8267 break; 8268 case 8: 8269 map[0] = OMX_AUDIO_ChannelLF; 8270 map[1] = OMX_AUDIO_ChannelRF; 8271 map[2] = OMX_AUDIO_ChannelCF; 8272 map[3] = OMX_AUDIO_ChannelLFE; 8273 map[4] = OMX_AUDIO_ChannelLR; 8274 map[5] = OMX_AUDIO_ChannelRR; 8275 map[6] = OMX_AUDIO_ChannelLS; 8276 map[7] = OMX_AUDIO_ChannelRS; 8277 break; 8278 default: 8279 return -EINVAL; 8280 } 8281 8282 return OK; 8283} 8284 8285} // namespace android 8286