// ACodec.cpp — AOSP revision b1302149f69689b805362a116c76c33ecc211ab3
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "omx/OMXUtils.h" 57 58namespace android { 59 60enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62}; 63 64// OMX errors are directly mapped into 
status_t range if 65// there is no corresponding MediaError status code. 66// Use the statusFromOMXError(int32_t omxError) function. 67// 68// Currently this is a direct map. 69// See frameworks/native/include/media/openmax/OMX_Core.h 70// 71// Vendor OMX errors from 0x90000000 - 0x9000FFFF 72// Extension OMX errors from 0x8F000000 - 0x90000000 73// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 74// 75 76// returns true if err is a recognized OMX error code. 77// as OMX error is OMX_S32, this is an int32_t type 78static inline bool isOMXError(int32_t err) { 79 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 80} 81 82// converts an OMX error to a status_t 83static inline status_t statusFromOMXError(int32_t omxError) { 84 switch (omxError) { 85 case OMX_ErrorInvalidComponentName: 86 case OMX_ErrorComponentNotFound: 87 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 88 default: 89 return isOMXError(omxError) ? omxError : 0; // no translation required 90 } 91} 92 93// checks and converts status_t to a non-side-effect status_t 94static inline status_t makeNoSideEffectStatus(status_t err) { 95 switch (err) { 96 // the following errors have side effects and may come 97 // from other code modules. Remap for safety reasons. 
98 case INVALID_OPERATION: 99 case DEAD_OBJECT: 100 return UNKNOWN_ERROR; 101 default: 102 return err; 103 } 104} 105 106struct MessageList : public RefBase { 107 MessageList() { 108 } 109 virtual ~MessageList() { 110 } 111 std::list<sp<AMessage> > &getList() { return mList; } 112private: 113 std::list<sp<AMessage> > mList; 114 115 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 116}; 117 118static sp<DataConverter> getCopyConverter() { 119 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited 120 static sp<DataConverter> sCopyConverter; // zero-inited 121 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); }); 122 return sCopyConverter; 123} 124 125struct CodecObserver : public BnOMXObserver { 126 CodecObserver() {} 127 128 void setNotificationMessage(const sp<AMessage> &msg) { 129 mNotify = msg; 130 } 131 132 // from IOMXObserver 133 virtual void onMessages(const std::list<omx_message> &messages) { 134 if (messages.empty()) { 135 return; 136 } 137 138 sp<AMessage> notify = mNotify->dup(); 139 bool first = true; 140 sp<MessageList> msgList = new MessageList(); 141 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 142 it != messages.cend(); ++it) { 143 const omx_message &omx_msg = *it; 144 if (first) { 145 notify->setInt32("node", omx_msg.node); 146 first = false; 147 } 148 149 sp<AMessage> msg = new AMessage; 150 msg->setInt32("type", omx_msg.type); 151 switch (omx_msg.type) { 152 case omx_message::EVENT: 153 { 154 msg->setInt32("event", omx_msg.u.event_data.event); 155 msg->setInt32("data1", omx_msg.u.event_data.data1); 156 msg->setInt32("data2", omx_msg.u.event_data.data2); 157 break; 158 } 159 160 case omx_message::EMPTY_BUFFER_DONE: 161 { 162 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 163 msg->setInt32("fence_fd", omx_msg.fenceFd); 164 break; 165 } 166 167 case omx_message::FILL_BUFFER_DONE: 168 { 169 msg->setInt32( 170 "buffer", omx_msg.u.extended_buffer_data.buffer); 171 msg->setInt32( 172 "range_offset", 173 
omx_msg.u.extended_buffer_data.range_offset); 174 msg->setInt32( 175 "range_length", 176 omx_msg.u.extended_buffer_data.range_length); 177 msg->setInt32( 178 "flags", 179 omx_msg.u.extended_buffer_data.flags); 180 msg->setInt64( 181 "timestamp", 182 omx_msg.u.extended_buffer_data.timestamp); 183 msg->setInt32( 184 "fence_fd", omx_msg.fenceFd); 185 break; 186 } 187 188 case omx_message::FRAME_RENDERED: 189 { 190 msg->setInt64( 191 "media_time_us", omx_msg.u.render_data.timestamp); 192 msg->setInt64( 193 "system_nano", omx_msg.u.render_data.nanoTime); 194 break; 195 } 196 197 default: 198 ALOGE("Unrecognized message type: %d", omx_msg.type); 199 break; 200 } 201 msgList->getList().push_back(msg); 202 } 203 notify->setObject("messages", msgList); 204 notify->post(); 205 } 206 207protected: 208 virtual ~CodecObserver() {} 209 210private: 211 sp<AMessage> mNotify; 212 213 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); 214}; 215 216//////////////////////////////////////////////////////////////////////////////// 217 218struct ACodec::BaseState : public AState { 219 explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL); 220 221protected: 222 enum PortMode { 223 KEEP_BUFFERS, 224 RESUBMIT_BUFFERS, 225 FREE_BUFFERS, 226 }; 227 228 ACodec *mCodec; 229 230 virtual PortMode getPortMode(OMX_U32 portIndex); 231 232 virtual bool onMessageReceived(const sp<AMessage> &msg); 233 234 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 235 236 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 237 virtual void onInputBufferFilled(const sp<AMessage> &msg); 238 239 void postFillThisBuffer(BufferInfo *info); 240 241private: 242 // Handles an OMX message. Returns true iff message was handled. 243 bool onOMXMessage(const sp<AMessage> &msg); 244 245 // Handles a list of messages. Returns true iff messages were handled. 
246 bool onOMXMessageList(const sp<AMessage> &msg); 247 248 // returns true iff this message is for this component and the component is alive 249 bool checkOMXMessage(const sp<AMessage> &msg); 250 251 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd); 252 253 bool onOMXFillBufferDone( 254 IOMX::buffer_id bufferID, 255 size_t rangeOffset, size_t rangeLength, 256 OMX_U32 flags, 257 int64_t timeUs, 258 int fenceFd); 259 260 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 261 262 void getMoreInputDataIfPossible(); 263 264 DISALLOW_EVIL_CONSTRUCTORS(BaseState); 265}; 266 267//////////////////////////////////////////////////////////////////////////////// 268 269struct ACodec::DeathNotifier : public IBinder::DeathRecipient { 270 explicit DeathNotifier(const sp<AMessage> ¬ify) 271 : mNotify(notify) { 272 } 273 274 virtual void binderDied(const wp<IBinder> &) { 275 mNotify->post(); 276 } 277 278protected: 279 virtual ~DeathNotifier() {} 280 281private: 282 sp<AMessage> mNotify; 283 284 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); 285}; 286 287struct ACodec::UninitializedState : public ACodec::BaseState { 288 explicit UninitializedState(ACodec *codec); 289 290protected: 291 virtual bool onMessageReceived(const sp<AMessage> &msg); 292 virtual void stateEntered(); 293 294private: 295 void onSetup(const sp<AMessage> &msg); 296 bool onAllocateComponent(const sp<AMessage> &msg); 297 298 sp<DeathNotifier> mDeathNotifier; 299 300 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); 301}; 302 303//////////////////////////////////////////////////////////////////////////////// 304 305struct ACodec::LoadedState : public ACodec::BaseState { 306 explicit LoadedState(ACodec *codec); 307 308protected: 309 virtual bool onMessageReceived(const sp<AMessage> &msg); 310 virtual void stateEntered(); 311 312private: 313 friend struct ACodec::UninitializedState; 314 315 bool onConfigureComponent(const sp<AMessage> &msg); 316 void onCreateInputSurface(const 
sp<AMessage> &msg); 317 void onSetInputSurface(const sp<AMessage> &msg); 318 void onStart(); 319 void onShutdown(bool keepComponentAllocated); 320 321 status_t setupInputSurface(); 322 323 DISALLOW_EVIL_CONSTRUCTORS(LoadedState); 324}; 325 326//////////////////////////////////////////////////////////////////////////////// 327 328struct ACodec::LoadedToIdleState : public ACodec::BaseState { 329 explicit LoadedToIdleState(ACodec *codec); 330 331protected: 332 virtual bool onMessageReceived(const sp<AMessage> &msg); 333 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 334 virtual void stateEntered(); 335 336private: 337 status_t allocateBuffers(); 338 339 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); 340}; 341 342//////////////////////////////////////////////////////////////////////////////// 343 344struct ACodec::IdleToExecutingState : public ACodec::BaseState { 345 explicit IdleToExecutingState(ACodec *codec); 346 347protected: 348 virtual bool onMessageReceived(const sp<AMessage> &msg); 349 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 350 virtual void stateEntered(); 351 352private: 353 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); 354}; 355 356//////////////////////////////////////////////////////////////////////////////// 357 358struct ACodec::ExecutingState : public ACodec::BaseState { 359 explicit ExecutingState(ACodec *codec); 360 361 void submitRegularOutputBuffers(); 362 void submitOutputMetaBuffers(); 363 void submitOutputBuffers(); 364 365 // Submit output buffers to the decoder, submit input buffers to client 366 // to fill with data. 367 void resume(); 368 369 // Returns true iff input and output buffers are in play. 
370 bool active() const { return mActive; } 371 372protected: 373 virtual PortMode getPortMode(OMX_U32 portIndex); 374 virtual bool onMessageReceived(const sp<AMessage> &msg); 375 virtual void stateEntered(); 376 377 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 378 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 379 380private: 381 bool mActive; 382 383 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); 384}; 385 386//////////////////////////////////////////////////////////////////////////////// 387 388struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { 389 explicit OutputPortSettingsChangedState(ACodec *codec); 390 391protected: 392 virtual PortMode getPortMode(OMX_U32 portIndex); 393 virtual bool onMessageReceived(const sp<AMessage> &msg); 394 virtual void stateEntered(); 395 396 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 397 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 398 399private: 400 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); 401}; 402 403//////////////////////////////////////////////////////////////////////////////// 404 405struct ACodec::ExecutingToIdleState : public ACodec::BaseState { 406 explicit ExecutingToIdleState(ACodec *codec); 407 408protected: 409 virtual bool onMessageReceived(const sp<AMessage> &msg); 410 virtual void stateEntered(); 411 412 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 413 414 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 415 virtual void onInputBufferFilled(const sp<AMessage> &msg); 416 417private: 418 void changeStateIfWeOwnAllBuffers(); 419 420 bool mComponentNowIdle; 421 422 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); 423}; 424 425//////////////////////////////////////////////////////////////////////////////// 426 427struct ACodec::IdleToLoadedState : public ACodec::BaseState { 428 explicit IdleToLoadedState(ACodec 
*codec); 429 430protected: 431 virtual bool onMessageReceived(const sp<AMessage> &msg); 432 virtual void stateEntered(); 433 434 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 435 436private: 437 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); 438}; 439 440//////////////////////////////////////////////////////////////////////////////// 441 442struct ACodec::FlushingState : public ACodec::BaseState { 443 explicit FlushingState(ACodec *codec); 444 445protected: 446 virtual bool onMessageReceived(const sp<AMessage> &msg); 447 virtual void stateEntered(); 448 449 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 450 451 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 452 virtual void onInputBufferFilled(const sp<AMessage> &msg); 453 454private: 455 bool mFlushComplete[2]; 456 457 void changeStateIfWeOwnAllBuffers(); 458 459 DISALLOW_EVIL_CONSTRUCTORS(FlushingState); 460}; 461 462//////////////////////////////////////////////////////////////////////////////// 463 464void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) { 465 if (mFenceFd >= 0) { 466 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", 467 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 468 } 469 mFenceFd = fenceFd; 470 mIsReadFence = false; 471} 472 473void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { 474 if (mFenceFd >= 0) { 475 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", 476 mIsReadFence ? 
"read" : "write", mFenceFd, fenceFd, dbg); 477 } 478 mFenceFd = fenceFd; 479 mIsReadFence = true; 480} 481 482void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 483 if (mFenceFd >= 0 && mIsReadFence) { 484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 485 } 486} 487 488void ACodec::BufferInfo::checkReadFence(const char *dbg) { 489 if (mFenceFd >= 0 && !mIsReadFence) { 490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 491 } 492} 493 494//////////////////////////////////////////////////////////////////////////////// 495 496ACodec::ACodec() 497 : mSampleRate(0), 498 mQuirks(0), 499 mNode(0), 500 mUsingNativeWindow(false), 501 mNativeWindowUsageBits(0), 502 mIsVideo(false), 503 mIsEncoder(false), 504 mFatalError(false), 505 mShutdownInProgress(false), 506 mExplicitShutdown(false), 507 mIsLegacyVP9Decoder(false), 508 mEncoderDelay(0), 509 mEncoderPadding(0), 510 mRotationDegrees(0), 511 mChannelMaskPresent(false), 512 mChannelMask(0), 513 mDequeueCounter(0), 514 mInputMetadataType(kMetadataBufferTypeInvalid), 515 mOutputMetadataType(kMetadataBufferTypeInvalid), 516 mLegacyAdaptiveExperiment(false), 517 mMetadataBuffersToSubmit(0), 518 mNumUndequeuedBuffers(0), 519 mRepeatFrameDelayUs(-1ll), 520 mMaxPtsGapUs(-1ll), 521 mMaxFps(-1), 522 mTimePerFrameUs(-1ll), 523 mTimePerCaptureUs(-1ll), 524 mCreateInputBuffersSuspended(false), 525 mTunneled(false), 526 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0), 527 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) { 528 mUninitializedState = new UninitializedState(this); 529 mLoadedState = new LoadedState(this); 530 mLoadedToIdleState = new LoadedToIdleState(this); 531 mIdleToExecutingState = new IdleToExecutingState(this); 532 mExecutingState = new ExecutingState(this); 533 534 mOutputPortSettingsChangedState = 535 new OutputPortSettingsChangedState(this); 536 537 mExecutingToIdleState = new ExecutingToIdleState(this); 538 mIdleToLoadedState = new IdleToLoadedState(this); 539 
mFlushingState = new FlushingState(this); 540 541 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 542 mInputEOSResult = OK; 543 544 changeState(mUninitializedState); 545} 546 547ACodec::~ACodec() { 548} 549 550void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 551 mNotify = msg; 552} 553 554void ACodec::initiateSetup(const sp<AMessage> &msg) { 555 msg->setWhat(kWhatSetup); 556 msg->setTarget(this); 557 msg->post(); 558} 559 560void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 561 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 562 msg->setMessage("params", params); 563 msg->post(); 564} 565 566void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 567 msg->setWhat(kWhatAllocateComponent); 568 msg->setTarget(this); 569 msg->post(); 570} 571 572void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 573 msg->setWhat(kWhatConfigureComponent); 574 msg->setTarget(this); 575 msg->post(); 576} 577 578status_t ACodec::setSurface(const sp<Surface> &surface) { 579 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 580 msg->setObject("surface", surface); 581 582 sp<AMessage> response; 583 status_t err = msg->postAndAwaitResponse(&response); 584 585 if (err == OK) { 586 (void)response->findInt32("err", &err); 587 } 588 return err; 589} 590 591void ACodec::initiateCreateInputSurface() { 592 (new AMessage(kWhatCreateInputSurface, this))->post(); 593} 594 595void ACodec::initiateSetInputSurface( 596 const sp<PersistentSurface> &surface) { 597 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 598 msg->setObject("input-surface", surface); 599 msg->post(); 600} 601 602void ACodec::signalEndOfInputStream() { 603 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 604} 605 606void ACodec::initiateStart() { 607 (new AMessage(kWhatStart, this))->post(); 608} 609 610void ACodec::signalFlush() { 611 ALOGV("[%s] signalFlush", mComponentName.c_str()); 612 (new AMessage(kWhatFlush, this))->post(); 
613} 614 615void ACodec::signalResume() { 616 (new AMessage(kWhatResume, this))->post(); 617} 618 619void ACodec::initiateShutdown(bool keepComponentAllocated) { 620 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 621 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 622 msg->post(); 623 if (!keepComponentAllocated) { 624 // ensure shutdown completes in 3 seconds 625 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 626 } 627} 628 629void ACodec::signalRequestIDRFrame() { 630 (new AMessage(kWhatRequestIDRFrame, this))->post(); 631} 632 633// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 634// Some codecs may return input buffers before having them processed. 635// This causes a halt if we already signaled an EOS on the input 636// port. For now keep submitting an output buffer if there was an 637// EOS on the input port, but not yet on the output port. 638void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 639 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 640 mMetadataBuffersToSubmit > 0) { 641 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 642 } 643} 644 645status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 646 // allow keeping unset surface 647 if (surface == NULL) { 648 if (mNativeWindow != NULL) { 649 ALOGW("cannot unset a surface"); 650 return INVALID_OPERATION; 651 } 652 return OK; 653 } 654 655 // cannot switch from bytebuffers to surface 656 if (mNativeWindow == NULL) { 657 ALOGW("component was not configured with a surface"); 658 return INVALID_OPERATION; 659 } 660 661 ANativeWindow *nativeWindow = surface.get(); 662 // if we have not yet started the codec, we can simply set the native window 663 if (mBuffers[kPortIndexInput].size() == 0) { 664 mNativeWindow = surface; 665 return OK; 666 } 667 668 // we do not support changing a tunneled surface after start 669 if (mTunneled) { 670 ALOGW("cannot change tunneled surface"); 671 return 
INVALID_OPERATION; 672 } 673 674 int usageBits = 0; 675 status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow, &usageBits); 676 if (err != OK) { 677 return err; 678 } 679 680 int ignoredFlags = kVideoGrallocUsage; 681 // New output surface is not allowed to add new usage flag except ignored ones. 682 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 683 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 684 return BAD_VALUE; 685 } 686 687 // get min undequeued count. We cannot switch to a surface that has a higher 688 // undequeued count than we allocated. 689 int minUndequeuedBuffers = 0; 690 err = nativeWindow->query( 691 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 692 &minUndequeuedBuffers); 693 if (err != 0) { 694 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 695 strerror(-err), -err); 696 return err; 697 } 698 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 699 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 700 minUndequeuedBuffers, mNumUndequeuedBuffers); 701 return BAD_VALUE; 702 } 703 704 // we cannot change the number of output buffers while OMX is running 705 // set up surface to the same count 706 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 707 ALOGV("setting up surface for %zu buffers", buffers.size()); 708 709 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 710 if (err != 0) { 711 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 712 -err); 713 return err; 714 } 715 716 // need to enable allocation when attaching 717 surface->getIGraphicBufferProducer()->allowAllocation(true); 718 719 // for meta data mode, we move dequeud buffers to the new surface. 
720 // for non-meta mode, we must move all registered buffers 721 for (size_t i = 0; i < buffers.size(); ++i) { 722 const BufferInfo &info = buffers[i]; 723 // skip undequeued buffers for meta data mode 724 if (storingMetadataInDecodedBuffers() 725 && !mLegacyAdaptiveExperiment 726 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 727 ALOGV("skipping buffer"); 728 continue; 729 } 730 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 731 732 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 733 if (err != OK) { 734 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 735 info.mGraphicBuffer->getNativeBuffer(), 736 strerror(-err), -err); 737 return err; 738 } 739 } 740 741 // cancel undequeued buffers to new surface 742 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 743 for (size_t i = 0; i < buffers.size(); ++i) { 744 BufferInfo &info = buffers.editItemAt(i); 745 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 746 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 747 err = nativeWindow->cancelBuffer( 748 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 749 info.mFenceFd = -1; 750 if (err != OK) { 751 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 752 info.mGraphicBuffer->getNativeBuffer(), 753 strerror(-err), -err); 754 return err; 755 } 756 } 757 } 758 // disallow further allocation 759 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 760 } 761 762 // push blank buffers to previous window if requested 763 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 764 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 765 } 766 767 mNativeWindow = nativeWindow; 768 mNativeWindowUsageBits = usageBits; 769 return OK; 770} 771 772status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 773 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 774 775 CHECK(mDealer[portIndex] == 
NULL); 776 CHECK(mBuffers[portIndex].isEmpty()); 777 778 status_t err; 779 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { 780 if (storingMetadataInDecodedBuffers()) { 781 err = allocateOutputMetadataBuffers(); 782 } else { 783 err = allocateOutputBuffersFromNativeWindow(); 784 } 785 } else { 786 OMX_PARAM_PORTDEFINITIONTYPE def; 787 InitOMXParams(&def); 788 def.nPortIndex = portIndex; 789 790 err = mOMX->getParameter( 791 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 792 793 if (err == OK) { 794 MetadataBufferType type = 795 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 796 size_t bufSize = def.nBufferSize; 797 if (type == kMetadataBufferTypeANWBuffer) { 798 bufSize = sizeof(VideoNativeMetadata); 799 } else if (type == kMetadataBufferTypeNativeHandleSource) { 800 bufSize = sizeof(VideoNativeHandleMetadata); 801 } 802 803 // If using gralloc or native source input metadata buffers, allocate largest 804 // metadata size as we prefer to generate native source metadata, but component 805 // may require gralloc source. For camera source, allocate at least enough 806 // size for native metadata buffers. 
807 size_t allottedSize = bufSize; 808 if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) { 809 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 810 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 811 bufSize = max(bufSize, sizeof(VideoNativeMetadata)); 812 } 813 814 size_t conversionBufferSize = 0; 815 816 sp<DataConverter> converter = mConverter[portIndex]; 817 if (converter != NULL) { 818 // here we assume sane conversions of max 4:1, so result fits in int32 819 if (portIndex == kPortIndexInput) { 820 conversionBufferSize = converter->sourceSize(bufSize); 821 } else { 822 conversionBufferSize = converter->targetSize(bufSize); 823 } 824 } 825 826 size_t alignment = MemoryDealer::getAllocationAlignment(); 827 828 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port", 829 mComponentName.c_str(), 830 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 831 portIndex == kPortIndexInput ? 
"input" : "output"); 832 833 // verify buffer sizes to avoid overflow in align() 834 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { 835 ALOGE("b/22885421"); 836 return NO_MEMORY; 837 } 838 839 // don't modify bufSize as OMX may not expect it to increase after negotiation 840 size_t alignedSize = align(bufSize, alignment); 841 size_t alignedConvSize = align(conversionBufferSize, alignment); 842 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { 843 ALOGE("b/22885421"); 844 return NO_MEMORY; 845 } 846 847 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); 848 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 849 850 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 851 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 852 if (mem == NULL || mem->pointer() == NULL) { 853 return NO_MEMORY; 854 } 855 856 BufferInfo info; 857 info.mStatus = BufferInfo::OWNED_BY_US; 858 info.mFenceFd = -1; 859 info.mRenderInfo = NULL; 860 info.mNativeHandle = NULL; 861 862 uint32_t requiresAllocateBufferBit = 863 (portIndex == kPortIndexInput) 864 ? kRequiresAllocateBufferOnInputPorts 865 : kRequiresAllocateBufferOnOutputPorts; 866 867 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 868 mem.clear(); 869 870 void *ptr = NULL; 871 sp<NativeHandle> native_handle; 872 err = mOMX->allocateSecureBuffer( 873 mNode, portIndex, bufSize, &info.mBufferID, 874 &ptr, &native_handle); 875 876 // TRICKY: this representation is unorthodox, but ACodec requires 877 // an ABuffer with a proper size to validate range offsets and lengths. 878 // Since mData is never referenced for secure input, it is used to store 879 // either the pointer to the secure buffer, or the opaque handle as on 880 // some devices ptr is actually an opaque handle, not a pointer. 
881 882 // TRICKY2: use native handle as the base of the ABuffer if received one, 883 // because Widevine source only receives these base addresses. 884 const native_handle_t *native_handle_ptr = 885 native_handle == NULL ? NULL : native_handle->handle(); 886 info.mData = new ABuffer( 887 ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize); 888 info.mNativeHandle = native_handle; 889 info.mCodecData = info.mData; 890 } else if (mQuirks & requiresAllocateBufferBit) { 891 err = mOMX->allocateBufferWithBackup( 892 mNode, portIndex, mem, &info.mBufferID, allottedSize); 893 } else { 894 err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize); 895 } 896 897 if (mem != NULL) { 898 info.mCodecData = new ABuffer(mem->pointer(), bufSize); 899 info.mCodecRef = mem; 900 901 if (type == kMetadataBufferTypeANWBuffer) { 902 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 903 } 904 905 // if we require conversion, allocate conversion buffer for client use; 906 // otherwise, reuse codec buffer 907 if (mConverter[portIndex] != NULL) { 908 CHECK_GT(conversionBufferSize, (size_t)0); 909 mem = mDealer[portIndex]->allocate(conversionBufferSize); 910 if (mem == NULL|| mem->pointer() == NULL) { 911 return NO_MEMORY; 912 } 913 info.mData = new ABuffer(mem->pointer(), conversionBufferSize); 914 info.mMemRef = mem; 915 } else { 916 info.mData = info.mCodecData; 917 info.mMemRef = info.mCodecRef; 918 } 919 } 920 921 mBuffers[portIndex].push(info); 922 } 923 } 924 } 925 926 if (err != OK) { 927 return err; 928 } 929 930 sp<AMessage> notify = mNotify->dup(); 931 notify->setInt32("what", CodecBase::kWhatBuffersAllocated); 932 933 notify->setInt32("portIndex", portIndex); 934 935 sp<PortDescription> desc = new PortDescription; 936 937 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 938 const BufferInfo &info = mBuffers[portIndex][i]; 939 desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef); 940 } 941 942 
    // (tail of a port-settings notification helper whose start lies above this chunk)
    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Reads the current output-port definition from the OMX component, merges the
// component's requested gralloc usage bits with ACodec's own (protected-output
// and kVideoGrallocUsage), reports the final usage via |finalUsage|, and
// applies size/format/rotation/usage to |nativeWindow|.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    // Secure playback requires protected gralloc buffers.
    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage);
}

// Negotiates the output buffer count between the OMX component and the native
// window: configures the window (size/format/usage), queries the window's
// minimum undequeued buffer count, then tries to raise the component's
// nBufferCountActual by a few extra buffers to reduce starvation. On success
// fills in |bufferCount|, |bufferSize| and |minUndequeuedBuffers|. Tunneled
// playback exits early with all counts zeroed, as buffers are managed by the
// tunneled component itself.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(mNativeWindow.get(), &mNativeWindowUsageBits);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // Starts with 3 extra buffers and backs off one at a time until the
    // component accepts the count (or fails at extraBuffers == 0).
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            // The extras behave like additional undequeued buffers from the
            // component's perspective.
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Allocates real (non-metadata) output buffers by dequeuing graphic buffers
// from the native window and registering each with the OMX component via
// useGraphicBuffer(). The required minimum undequeued buffers are cancelled
// back to the window at the end (see continuation below).
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    // In non-metadata mode ACodec owns allocation; temporarily allow the
    // producer to allocate (disabled again after the loop below).
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
            ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        // mData carries no backing store here; the pixels live in the
        // graphic buffer.
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mCodecData = info.mData;
        info.mGraphicBuffer = graphicBuffer;
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    // Cancel the chosen range back to the native window; keep the first
    // error encountered (dequeue error takes precedence over cancel error).
    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            if (err == 0) {
                err = error;
            }
        }
    }

    // Re-disable producer-side allocation now that our buffers are registered.
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
            ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}

// Allocates small metadata buffers (VideoNativeMetadata / VideoGrallocMetadata)
// from a MemoryDealer and registers them with the component via useBuffer().
// Actual graphic buffers are dequeued lazily at fill time, except under the
// legacy-adaptive experiment, which pre-dequeues and pre-registers them here.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
            mComponentName.c_str(), bufferCount);

    // Metadata record size depends on the negotiated metadata type.
    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // Mark "no fence" in the metadata record.
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new ABuffer(mem->pointer(), mem->size());
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        // we use useBuffer for metadata regardless of quirks
        // NOTE(review): err from useBuffer is not checked per-iteration; the
        // loop continues and only the last status is propagated — confirm
        // whether an early break was intended.
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
                mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMX->updateGraphicBufferInMeta(
                    mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                if (err == OK) {
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // Buffers beyond the window's minimum can be submitted to the component.
    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Dequeues one graphic buffer from the native window and hands the matching
// metadata buffer to the component via fillBuffer(), transferring the write
// fence. No-op (OK) once the submit quota is exhausted.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());
    if (mMetadataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetadataBuffersToSubmit;
    info->checkWriteFence("submitOutputMetadataBuffer");
    // fillBuffer takes over the fence fd; clear our copy either way.
    status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
    info->mFenceFd = -1;
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    return err;
}

// Blocks (up to IOMX::kFenceTimeoutMs) until the fence |fd| signals; a
// negative fd means "no fence" and returns OK immediately. |dbg| is only
// used to tag the timeout warning.
status_t ACodec::waitForFence(int fd, const char *dbg ) {
    status_t res = OK;
    if (fd >= 0) {
        sp<Fence> fence = new Fence(fd);
        res = fence->wait(IOMX::kFenceTimeoutMs);
        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    }
    return res;
}

// static
// Human-readable name for a BufferInfo ownership state (for logs).
const char *ACodec::_asString(BufferInfo::Status s) {
    switch (s) {
        case BufferInfo::OWNED_BY_US:            return "OUR";
        case BufferInfo::OWNED_BY_COMPONENT:     return "COMPONENT";
        case BufferInfo::OWNED_BY_UPSTREAM:      return "UPSTREAM";
        case BufferInfo::OWNED_BY_DOWNSTREAM:    return "DOWNSTREAM";
        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
        case BufferInfo::UNRECOGNIZED:           return "UNRECOGNIZED";
        default:                                 return "?";
    }
}

// Logs ID, graphic buffer pointers, ownership and dequeue age of every buffer
// on the given port (debug aid).
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                i, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
    }
}

// Returns a buffer we own back to the native window via cancelBuffer,
// handing over the write fence. Ownership becomes OWNED_BY_NATIVE_WINDOW
// even if cancelBuffer fails.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    // cancelBuffer takes over the fence fd; clear our copy either way.
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Registers a freshly dequeued buffer with the render tracker (slot index is
// derived from the BufferInfo's position in the output array) and flushes any
// frames whose fences have already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Feeds a frame-rendered callback into the render tracker; dumps the queue
// on tracking errors for diagnosis.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}

// Collects frames whose render fences have signaled, unlinks their tracking
// info from the corresponding BufferInfos, and posts a
// kWhatOutputFramesRendered notification when there is anything to report.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index <
                mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}

// Dequeues a buffer from the native window and maps it back to the BufferInfo
// that tracks it. Stale buffers (known but not surface-owned, or unrecognized
// in non-meta mode) are discarded and the dequeue is retried. In metadata mode
// an unknown buffer replaces the least-recently-dequeued surface-owned slot.
// Returns NULL on dequeue failure, in tunneled mode, or after a fatal error.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
              " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        // Scan newest-to-oldest for the BufferInfo owning this handle.
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                    info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used.  We can simply ignore this as we have already dequeued
                // this buffer properly.  NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    // Point the slot's metadata record at the replacement graphic buffer.
    mOMX->updateGraphicBufferInMeta(
            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
            oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer on |portIndex| (iterating backwards since freeBuffer
// removes entries); returns the first error while still freeing the rest.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}

// Frees only the output buffers currently held by us or the native window;
// buffers still with the component or downstream are left alone.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t err = OK;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            status_t err2 = freeBuffer(kPortIndexOutput, i);
            if (err == OK) {
                err = err2;
            }
        }
    }

    return err;
}

// Releases buffer |i| on |portIndex|: cancels it back to the native window if
// we own it, frees it in the OMX component, closes any leftover fence, drops
// render tracking, and removes the BufferInfo entry (even if freeBuffer in
// the component failed).
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMX->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}

// Linear search for the BufferInfo with the given OMX buffer ID on a port;
// optionally reports the slot index via |index|. Returns NULL (with a log)
// if the ID is unknown.
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info =
            &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    ALOGE("Could not find buffer with ID %u", bufferID);
    return NULL;
}

// Looks up the standard OMX component role for |mime| and applies it to this
// codec's node. BAD_VALUE if the mime type has no known role.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    const char *role = getComponentRole(isEncoder, mime);
    if (role == NULL) {
        return BAD_VALUE;
    }
    status_t err = setComponentRole(mOMX, mNode, role);
    if (err != OK) {
        ALOGW("[%s] Failed to set standard component role '%s'.",
                mComponentName.c_str(), role);
    }
    return err;
}

//static
// Maps a mime type to its standard OMX decoder/encoder role string
// (case-insensitive mime match); NULL if the mime type is not in the table.
const char *ACodec::getComponentRole(
        bool isEncoder, const char *mime) {
    struct MimeToRole {
        const char *mime;
        const char *decoderRole;
        const char *encoderRole;
    };

    static const MimeToRole kMimeToRole[] = {
        { MEDIA_MIMETYPE_AUDIO_MPEG,
            "audio_decoder.mp3", "audio_encoder.mp3" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
            "audio_decoder.mp1", "audio_encoder.mp1" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
            "audio_decoder.mp2", "audio_encoder.mp2" },
        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
            "audio_decoder.amrnb", "audio_encoder.amrnb" },
        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
            "audio_decoder.amrwb", "audio_encoder.amrwb" },
        { MEDIA_MIMETYPE_AUDIO_AAC,
            "audio_decoder.aac", "audio_encoder.aac" },
        { MEDIA_MIMETYPE_AUDIO_VORBIS,
            "audio_decoder.vorbis", "audio_encoder.vorbis" },
        { MEDIA_MIMETYPE_AUDIO_OPUS,
            "audio_decoder.opus", "audio_encoder.opus" },
        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
        { MEDIA_MIMETYPE_VIDEO_AVC,
            "video_decoder.avc", "video_encoder.avc" },
        { MEDIA_MIMETYPE_VIDEO_HEVC,
            "video_decoder.hevc", "video_encoder.hevc" },
        { MEDIA_MIMETYPE_VIDEO_MPEG4,
            "video_decoder.mpeg4", "video_encoder.mpeg4" },
        { MEDIA_MIMETYPE_VIDEO_H263,
            "video_decoder.h263", "video_encoder.h263" },
        { MEDIA_MIMETYPE_VIDEO_VP8,
            "video_decoder.vp8", "video_encoder.vp8" },
        { MEDIA_MIMETYPE_VIDEO_VP9,
            "video_decoder.vp9", "video_encoder.vp9" },
        { MEDIA_MIMETYPE_AUDIO_RAW,
            "audio_decoder.raw", "audio_encoder.raw" },
        { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
            "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
        { MEDIA_MIMETYPE_AUDIO_FLAC,
            "audio_decoder.flac", "audio_encoder.flac" },
        { MEDIA_MIMETYPE_AUDIO_MSGSM,
            "audio_decoder.gsm", "audio_encoder.gsm" },
        { MEDIA_MIMETYPE_VIDEO_MPEG2,
            "video_decoder.mpeg2", "video_encoder.mpeg2" },
        { MEDIA_MIMETYPE_AUDIO_AC3,
            "audio_decoder.ac3", "audio_encoder.ac3" },
        { MEDIA_MIMETYPE_AUDIO_EAC3,
            "audio_decoder.eac3", "audio_encoder.eac3" },
    };

    static const size_t kNumMimeToRole =
        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);

    size_t i;
    for (i = 0; i < kNumMimeToRole; ++i) {
        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
            break;
        }
    }

    if (i == kNumMimeToRole) {
        return NULL;
    }

    return isEncoder ?
kMimeToRole[i].encoderRole 1682 : kMimeToRole[i].decoderRole; 1683} 1684 1685//static 1686status_t ACodec::setComponentRole( 1687 const sp<IOMX> &omx, IOMX::node_id node, const char *role) { 1688 OMX_PARAM_COMPONENTROLETYPE roleParams; 1689 InitOMXParams(&roleParams); 1690 1691 strncpy((char *)roleParams.cRole, 1692 role, OMX_MAX_STRINGNAME_SIZE - 1); 1693 1694 roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0'; 1695 1696 return omx->setParameter( 1697 node, OMX_IndexParamStandardComponentRole, 1698 &roleParams, sizeof(roleParams)); 1699} 1700 1701status_t ACodec::configureCodec( 1702 const char *mime, const sp<AMessage> &msg) { 1703 int32_t encoder; 1704 if (!msg->findInt32("encoder", &encoder)) { 1705 encoder = false; 1706 } 1707 1708 sp<AMessage> inputFormat = new AMessage; 1709 sp<AMessage> outputFormat = new AMessage; 1710 mConfigFormat = msg; 1711 1712 mIsEncoder = encoder; 1713 1714 mInputMetadataType = kMetadataBufferTypeInvalid; 1715 mOutputMetadataType = kMetadataBufferTypeInvalid; 1716 1717 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1718 1719 if (err != OK) { 1720 return err; 1721 } 1722 1723 int32_t bitRate = 0; 1724 // FLAC encoder doesn't need a bitrate, other encoders do 1725 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1726 && !msg->findInt32("bitrate", &bitRate)) { 1727 return INVALID_OPERATION; 1728 } 1729 1730 int32_t storeMeta; 1731 if (encoder 1732 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1733 && storeMeta != kMetadataBufferTypeInvalid) { 1734 mInputMetadataType = (MetadataBufferType)storeMeta; 1735 err = mOMX->storeMetaDataInBuffers( 1736 mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1737 if (err != OK) { 1738 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1739 mComponentName.c_str(), err); 1740 1741 return err; 1742 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1743 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1744 // IOMX 
translates ANWBuffers to gralloc source already. 1745 mInputMetadataType = (MetadataBufferType)storeMeta; 1746 } 1747 1748 uint32_t usageBits; 1749 if (mOMX->getParameter( 1750 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1751 &usageBits, sizeof(usageBits)) == OK) { 1752 inputFormat->setInt32( 1753 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1754 } 1755 } 1756 1757 int32_t prependSPSPPS = 0; 1758 if (encoder 1759 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1760 && prependSPSPPS != 0) { 1761 OMX_INDEXTYPE index; 1762 err = mOMX->getExtensionIndex( 1763 mNode, 1764 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1765 &index); 1766 1767 if (err == OK) { 1768 PrependSPSPPSToIDRFramesParams params; 1769 InitOMXParams(¶ms); 1770 params.bEnable = OMX_TRUE; 1771 1772 err = mOMX->setParameter( 1773 mNode, index, ¶ms, sizeof(params)); 1774 } 1775 1776 if (err != OK) { 1777 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1778 "IDR frames. (err %d)", err); 1779 1780 return err; 1781 } 1782 } 1783 1784 // Only enable metadata mode on encoder output if encoder can prepend 1785 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1786 // opaque handle, to which we don't have access. 
1787 int32_t video = !strncasecmp(mime, "video/", 6); 1788 mIsVideo = video; 1789 if (encoder && video) { 1790 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1791 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1792 && storeMeta != 0); 1793 1794 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1795 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1796 if (err != OK) { 1797 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1798 mComponentName.c_str(), err); 1799 } 1800 1801 if (!msg->findInt64( 1802 "repeat-previous-frame-after", 1803 &mRepeatFrameDelayUs)) { 1804 mRepeatFrameDelayUs = -1ll; 1805 } 1806 1807 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1808 mMaxPtsGapUs = -1ll; 1809 } 1810 1811 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1812 mMaxFps = -1; 1813 } 1814 1815 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1816 mTimePerCaptureUs = -1ll; 1817 } 1818 1819 if (!msg->findInt32( 1820 "create-input-buffers-suspended", 1821 (int32_t*)&mCreateInputBuffersSuspended)) { 1822 mCreateInputBuffersSuspended = false; 1823 } 1824 } 1825 1826 // NOTE: we only use native window for video decoders 1827 sp<RefBase> obj; 1828 bool haveNativeWindow = msg->findObject("native-window", &obj) 1829 && obj != NULL && video && !encoder; 1830 mUsingNativeWindow = haveNativeWindow; 1831 mLegacyAdaptiveExperiment = false; 1832 if (video && !encoder) { 1833 inputFormat->setInt32("adaptive-playback", false); 1834 1835 int32_t usageProtected; 1836 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1837 if (!haveNativeWindow) { 1838 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1839 return PERMISSION_DENIED; 1840 } 1841 mFlags |= kFlagIsGrallocUsageProtected; 1842 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1843 } 1844 1845 if (mFlags & kFlagIsSecure) { 1846 // use native_handles for secure input buffers 
1847 err = mOMX->enableNativeBuffers( 1848 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1849 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1850 err = OK; // ignore error for now 1851 } 1852 } 1853 if (haveNativeWindow) { 1854 sp<ANativeWindow> nativeWindow = 1855 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1856 1857 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1858 int32_t autoFrc; 1859 if (msg->findInt32("auto-frc", &autoFrc)) { 1860 bool enabled = autoFrc; 1861 OMX_CONFIG_BOOLEANTYPE config; 1862 InitOMXParams(&config); 1863 config.bEnabled = (OMX_BOOL)enabled; 1864 status_t temp = mOMX->setConfig( 1865 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1866 &config, sizeof(config)); 1867 if (temp == OK) { 1868 outputFormat->setInt32("auto-frc", enabled); 1869 } else if (enabled) { 1870 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1871 } 1872 } 1873 // END of temporary support for automatic FRC 1874 1875 int32_t tunneled; 1876 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1877 tunneled != 0) { 1878 ALOGI("Configuring TUNNELED video playback."); 1879 mTunneled = true; 1880 1881 int32_t audioHwSync = 0; 1882 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1883 ALOGW("No Audio HW Sync provided for video tunnel"); 1884 } 1885 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1886 if (err != OK) { 1887 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1888 audioHwSync, nativeWindow.get()); 1889 return err; 1890 } 1891 1892 int32_t maxWidth = 0, maxHeight = 0; 1893 if (msg->findInt32("max-width", &maxWidth) && 1894 msg->findInt32("max-height", &maxHeight)) { 1895 1896 err = mOMX->prepareForAdaptivePlayback( 1897 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1898 if (err != OK) { 1899 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1900 mComponentName.c_str(), err); 1901 // allow failure 1902 err = OK; 
1903 } else { 1904 inputFormat->setInt32("max-width", maxWidth); 1905 inputFormat->setInt32("max-height", maxHeight); 1906 inputFormat->setInt32("adaptive-playback", true); 1907 } 1908 } 1909 } else { 1910 ALOGV("Configuring CPU controlled video playback."); 1911 mTunneled = false; 1912 1913 // Explicity reset the sideband handle of the window for 1914 // non-tunneled video in case the window was previously used 1915 // for a tunneled video playback. 1916 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1917 if (err != OK) { 1918 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1919 return err; 1920 } 1921 1922 // Always try to enable dynamic output buffers on native surface 1923 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1924 err = mOMX->storeMetaDataInBuffers( 1925 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1926 if (err != OK) { 1927 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1928 mComponentName.c_str(), err); 1929 1930 // if adaptive playback has been requested, try JB fallback 1931 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1932 // LARGE MEMORY REQUIREMENT 1933 1934 // we will not do adaptive playback on software accessed 1935 // surfaces as they never had to respond to changes in the 1936 // crop window, and we don't trust that they will be able to. 
1937 int usageBits = 0; 1938 bool canDoAdaptivePlayback; 1939 1940 if (nativeWindow->query( 1941 nativeWindow.get(), 1942 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1943 &usageBits) != OK) { 1944 canDoAdaptivePlayback = false; 1945 } else { 1946 canDoAdaptivePlayback = 1947 (usageBits & 1948 (GRALLOC_USAGE_SW_READ_MASK | 1949 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1950 } 1951 1952 int32_t maxWidth = 0, maxHeight = 0; 1953 if (canDoAdaptivePlayback && 1954 msg->findInt32("max-width", &maxWidth) && 1955 msg->findInt32("max-height", &maxHeight)) { 1956 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1957 mComponentName.c_str(), maxWidth, maxHeight); 1958 1959 err = mOMX->prepareForAdaptivePlayback( 1960 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1961 maxHeight); 1962 ALOGW_IF(err != OK, 1963 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1964 mComponentName.c_str(), err); 1965 1966 if (err == OK) { 1967 inputFormat->setInt32("max-width", maxWidth); 1968 inputFormat->setInt32("max-height", maxHeight); 1969 inputFormat->setInt32("adaptive-playback", true); 1970 } 1971 } 1972 // allow failure 1973 err = OK; 1974 } else { 1975 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1976 mComponentName.c_str()); 1977 CHECK(storingMetadataInDecodedBuffers()); 1978 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1979 "legacy-adaptive", !msg->contains("no-experiments")); 1980 1981 inputFormat->setInt32("adaptive-playback", true); 1982 } 1983 1984 int32_t push; 1985 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1986 && push != 0) { 1987 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1988 } 1989 } 1990 1991 int32_t rotationDegrees; 1992 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1993 mRotationDegrees = rotationDegrees; 1994 } else { 1995 mRotationDegrees = 0; 1996 } 1997 } 1998 1999 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 2000 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 2001 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 2002 2003 if (video) { 2004 // determine need for software renderer 2005 bool usingSwRenderer = false; 2006 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 2007 usingSwRenderer = true; 2008 haveNativeWindow = false; 2009 } 2010 2011 if (encoder) { 2012 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2013 } else { 2014 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2015 } 2016 2017 if (err != OK) { 2018 return err; 2019 } 2020 2021 if (haveNativeWindow) { 2022 mNativeWindow = static_cast<Surface *>(obj.get()); 2023 } 2024 2025 // initialize native window now to get actual output format 2026 // TODO: this is needed for some encoders even though they don't use native window 2027 err = initNativeWindow(); 2028 if (err != OK) { 2029 return err; 2030 } 2031 2032 // fallback for devices that do not handle flex-YUV for native buffers 2033 if (haveNativeWindow) { 2034 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2035 if (msg->findInt32("color-format", &requestedColorFormat) && 2036 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2037 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2038 if (err != OK) { 2039 return err; 2040 } 2041 int32_t colorFormat = OMX_COLOR_FormatUnused; 2042 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2043 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2044 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2045 return BAD_VALUE; 2046 } 2047 ALOGD("[%s] Requested output format %#x and got %#x.", 2048 mComponentName.c_str(), requestedColorFormat, colorFormat); 2049 if (!isFlexibleColorFormat( 2050 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2051 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2052 // device did not handle flex-YUV request for native window, fall back 2053 // to SW renderer 2054 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2055 mNativeWindow.clear(); 2056 mNativeWindowUsageBits = 0; 2057 haveNativeWindow = false; 2058 usingSwRenderer = true; 2059 if (storingMetadataInDecodedBuffers()) { 2060 err = mOMX->storeMetaDataInBuffers( 2061 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2062 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2063 // TODO: implement adaptive-playback support for bytebuffer mode. 2064 // This is done by SW codecs, but most HW codecs don't support it. 2065 inputFormat->setInt32("adaptive-playback", false); 2066 } 2067 if (err == OK) { 2068 err = mOMX->enableNativeBuffers( 2069 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2070 } 2071 if (mFlags & kFlagIsGrallocUsageProtected) { 2072 // fallback is not supported for protected playback 2073 err = PERMISSION_DENIED; 2074 } else if (err == OK) { 2075 err = setupVideoDecoder( 2076 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2077 } 2078 } 2079 } 2080 } 2081 2082 if (usingSwRenderer) { 2083 outputFormat->setInt32("using-sw-renderer", 1); 2084 } 2085 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2086 int32_t numChannels, sampleRate; 2087 if (!msg->findInt32("channel-count", &numChannels) 2088 || !msg->findInt32("sample-rate", &sampleRate)) { 2089 // Since we did not always check for these, leave them optional 2090 // and have the decoder figure it all out. 2091 err = OK; 2092 } else { 2093 err = setupRawAudioFormat( 2094 encoder ? 
kPortIndexInput : kPortIndexOutput, 2095 sampleRate, 2096 numChannels); 2097 } 2098 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2099 int32_t numChannels, sampleRate; 2100 if (!msg->findInt32("channel-count", &numChannels) 2101 || !msg->findInt32("sample-rate", &sampleRate)) { 2102 err = INVALID_OPERATION; 2103 } else { 2104 int32_t isADTS, aacProfile; 2105 int32_t sbrMode; 2106 int32_t maxOutputChannelCount; 2107 int32_t pcmLimiterEnable; 2108 drcParams_t drc; 2109 if (!msg->findInt32("is-adts", &isADTS)) { 2110 isADTS = 0; 2111 } 2112 if (!msg->findInt32("aac-profile", &aacProfile)) { 2113 aacProfile = OMX_AUDIO_AACObjectNull; 2114 } 2115 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2116 sbrMode = -1; 2117 } 2118 2119 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2120 maxOutputChannelCount = -1; 2121 } 2122 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2123 // value is unknown 2124 pcmLimiterEnable = -1; 2125 } 2126 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2127 // value is unknown 2128 drc.encodedTargetLevel = -1; 2129 } 2130 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2131 // value is unknown 2132 drc.drcCut = -1; 2133 } 2134 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2135 // value is unknown 2136 drc.drcBoost = -1; 2137 } 2138 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2139 // value is unknown 2140 drc.heavyCompression = -1; 2141 } 2142 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2143 // value is unknown 2144 drc.targetRefLevel = -1; 2145 } 2146 2147 err = setupAACCodec( 2148 encoder, numChannels, sampleRate, bitRate, aacProfile, 2149 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2150 pcmLimiterEnable); 2151 } 2152 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2153 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2154 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2155 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2156 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2157 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2158 // These are PCM-like formats with a fixed sample rate but 2159 // a variable number of channels. 2160 2161 int32_t numChannels; 2162 if (!msg->findInt32("channel-count", &numChannels)) { 2163 err = INVALID_OPERATION; 2164 } else { 2165 int32_t sampleRate; 2166 if (!msg->findInt32("sample-rate", &sampleRate)) { 2167 sampleRate = 8000; 2168 } 2169 err = setupG711Codec(encoder, sampleRate, numChannels); 2170 } 2171 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2172 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2173 if (encoder && 2174 (!msg->findInt32("channel-count", &numChannels) 2175 || !msg->findInt32("sample-rate", &sampleRate))) { 2176 ALOGE("missing channel count or sample rate for FLAC encoder"); 2177 err = INVALID_OPERATION; 2178 } else { 2179 if (encoder) { 2180 if (!msg->findInt32( 2181 "complexity", &compressionLevel) && 2182 !msg->findInt32( 2183 "flac-compression-level", &compressionLevel)) { 2184 compressionLevel = 5; // default FLAC compression level 2185 } else if (compressionLevel < 0) { 2186 ALOGW("compression level %d outside [0..8] range, " 2187 "using 0", 2188 compressionLevel); 2189 compressionLevel = 0; 2190 } else if (compressionLevel > 8) { 2191 ALOGW("compression level %d outside [0..8] range, " 2192 "using 8", 2193 compressionLevel); 2194 compressionLevel = 8; 2195 } 2196 } 2197 err = setupFlacCodec( 2198 encoder, numChannels, sampleRate, compressionLevel); 2199 } 2200 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2201 int32_t numChannels, sampleRate; 2202 if (encoder 2203 || !msg->findInt32("channel-count", &numChannels) 2204 || !msg->findInt32("sample-rate", &sampleRate)) { 2205 err = INVALID_OPERATION; 2206 } else { 2207 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2208 } 2209 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2210 int32_t numChannels; 2211 int32_t sampleRate; 2212 if (!msg->findInt32("channel-count", &numChannels) 2213 || !msg->findInt32("sample-rate", &sampleRate)) { 2214 err = INVALID_OPERATION; 2215 } else { 2216 err = setupAC3Codec(encoder, numChannels, sampleRate); 2217 } 2218 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2219 int32_t numChannels; 2220 int32_t sampleRate; 2221 if (!msg->findInt32("channel-count", &numChannels) 2222 || !msg->findInt32("sample-rate", &sampleRate)) { 2223 err = INVALID_OPERATION; 2224 } else { 2225 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2226 } 2227 } 2228 2229 if (err != OK) { 2230 return err; 2231 } 2232 2233 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2234 mEncoderDelay = 0; 2235 } 2236 2237 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2238 mEncoderPadding = 0; 2239 } 2240 2241 if (msg->findInt32("channel-mask", &mChannelMask)) { 2242 mChannelMaskPresent = true; 2243 } else { 2244 mChannelMaskPresent = false; 2245 } 2246 2247 int32_t maxInputSize; 2248 if (msg->findInt32("max-input-size", &maxInputSize)) { 2249 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2250 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2251 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2252 } 2253 2254 int32_t priority; 2255 if (msg->findInt32("priority", &priority)) { 2256 err = setPriority(priority); 2257 } 2258 2259 int32_t rateInt = -1; 2260 float rateFloat = -1; 2261 if (!msg->findFloat("operating-rate", &rateFloat)) { 2262 msg->findInt32("operating-rate", &rateInt); 2263 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
2264 } 2265 if (rateFloat > 0) { 2266 err = setOperatingRate(rateFloat, video); 2267 } 2268 2269 // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame. 2270 mBaseOutputFormat = outputFormat; 2271 // trigger a kWhatOutputFormatChanged msg on first buffer 2272 mLastOutputFormat.clear(); 2273 2274 err = getPortFormat(kPortIndexInput, inputFormat); 2275 if (err == OK) { 2276 err = getPortFormat(kPortIndexOutput, outputFormat); 2277 if (err == OK) { 2278 mInputFormat = inputFormat; 2279 mOutputFormat = outputFormat; 2280 } 2281 } 2282 2283 // create data converters if needed 2284 if (!video && err == OK) { 2285 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 2286 if (encoder) { 2287 (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2288 mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding); 2289 if (mConverter[kPortIndexInput] != NULL) { 2290 mInputFormat->setInt32("pcm-encoding", pcmEncoding); 2291 } 2292 } else { 2293 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2294 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 2295 if (mConverter[kPortIndexOutput] != NULL) { 2296 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 2297 } 2298 } 2299 } 2300 2301 return err; 2302} 2303 2304status_t ACodec::setPriority(int32_t priority) { 2305 if (priority < 0) { 2306 return BAD_VALUE; 2307 } 2308 OMX_PARAM_U32TYPE config; 2309 InitOMXParams(&config); 2310 config.nU32 = (OMX_U32)priority; 2311 status_t temp = mOMX->setConfig( 2312 mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2313 &config, sizeof(config)); 2314 if (temp != OK) { 2315 ALOGI("codec does not support config priority (err %d)", temp); 2316 } 2317 return OK; 2318} 2319 2320status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2321 if (rateFloat < 0) { 2322 return BAD_VALUE; 2323 } 2324 OMX_U32 rate; 2325 if (isVideo) { 2326 if (rateFloat > 
65535) {
            return BAD_VALUE;
        }
        // Video operating rates are expressed in Q16 fixed point
        // (frames per second * 65536), rounded to nearest.
        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
    } else {
        if (rateFloat > UINT_MAX) {
            return BAD_VALUE;
        }
        // Audio operating rates are plain integer Hz.
        rate = (OMX_U32)(rateFloat);
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = rate;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
            &config, sizeof(config));
    if (err != OK) {
        // Operating rate is an optional feature; failure is not fatal.
        ALOGI("codec does not support config operating rate (err %d)", err);
    }
    return OK;
}

// Queries the encoder's intra refresh period (in frames). Tries the Android
// extension config first; if unsupported, falls back to the standard OMX
// cyclic intra-refresh parameter, converting refreshed-macroblocks-per-frame
// into a frame period based on the current output frame dimensions.
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        // Unsupported or disabled: report "no intra refresh" (period 0).
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);

    return OK;
}

// Sets the intra refresh period (in frames; 0 disables intra refresh). Tries
// the Android extension config first; if unsupported and we are still in the
// configure state, falls back to the standard OMX cyclic intra-refresh
// parameter (setParameter is only legal during configure).
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}

// Ensures the given port's buffer size is at least |size| bytes, growing the
// port definition if needed and verifying the result by readback (some
// components silently ignore the requested size).
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize >= size) {
        // Already large enough; nothing to do.
        return OK;
    }

    def.nBufferSize = size;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // Read the definition back to confirm the component honored the request.
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize < size) {
        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
        return FAILED_TRANSACTION;
    }

    return OK;
}

// Selects the audio port format matching |desiredFormat| by enumerating the
// component's supported formats, bounded by kMaxIndicesToCheck to avoid
// looping forever on misbehaving components.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            break;
        }

        if (index == kMaxIndicesToCheck) {
            // Exhausted the enumeration budget without a match.
            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eEncoding), format.eEncoding);
            return ERROR_UNSUPPORTED;
        }
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}

// Configures the component for AAC encode or decode. ADTS framing is only
// supported when decoding. The PCM side is set up on the port opposite the
// compressed data (input for encoders, output for decoders).
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        return -EINVAL;
    }

    status_t err = setupRawAudioFormat(
            encoder ?
kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        // Encoder path: compressed AAC comes out of the output port.
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // SBR (spectral band replication) tool selection via the Android
        // tool-flag extensions.
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // Decoder path: compressed AAC comes in on the input port.
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // Android-specific presentation parameters (DRC, downmix, limiter);
    // applied best-effort after the main AAC parameters succeed.
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    mSampleRate = sampleRate;
    return res;
}

// Configures the component for AC3 decoding (encoding is not supported).
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ?
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("AC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));
}

// Configures the component for E-AC3 decoding (encoding is not supported).
status_t ACodec::setupEAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("EAC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));
}

// Maps a requested bitrate (bits per second) to the highest AMR band mode
// whose nominal rate does not exceed it, for AMR-WB or AMR-NB. Rates above
// the top threshold select the highest mode.
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
        bool isAMRWB, int32_t bps) {
    if (isAMRWB) {
        if (bps <= 6600) {
            return OMX_AUDIO_AMRBandModeWB0;
        } else if (bps <= 8850) {
            return OMX_AUDIO_AMRBandModeWB1;
        } else if (bps <= 12650) {
            return OMX_AUDIO_AMRBandModeWB2;
        } else if (bps <= 14250) {
            return OMX_AUDIO_AMRBandModeWB3;
        } else if (bps <= 15850) {
            return OMX_AUDIO_AMRBandModeWB4;
        } else if (bps <= 18250) {
            return OMX_AUDIO_AMRBandModeWB5;
        } else if (bps <= 19850) {
            return OMX_AUDIO_AMRBandModeWB6;
        } else if (bps <= 23050) {
            return OMX_AUDIO_AMRBandModeWB7;
        }

        // 23850 bps
        return OMX_AUDIO_AMRBandModeWB8;
    } else {  // AMRNB
        if (bps <= 4750) {
            return OMX_AUDIO_AMRBandModeNB0;
        } else if (bps <= 5150) {
            return OMX_AUDIO_AMRBandModeNB1;
        } else if (bps <= 5900) {
            return OMX_AUDIO_AMRBandModeNB2;
        } else if (bps <= 6700) {
            return OMX_AUDIO_AMRBandModeNB3;
        } else if (bps <= 7400) {
            return OMX_AUDIO_AMRBandModeNB4;
        } else if (bps <= 7950) {
            return OMX_AUDIO_AMRBandModeNB5;
        } else if (bps <= 10200) {
            return OMX_AUDIO_AMRBandModeNB6;
        }

        // 12200 bps
        return OMX_AUDIO_AMRBandModeNB7;
    }
}

// Configures the component for AMR narrowband or wideband at the band mode
// closest to |bitrate|, then sets up PCM on the opposite port.
status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
    OMX_AUDIO_PARAM_AMRTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;

    status_t err =
        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // AMR is mono; sample rate is fixed by the variant (WB=16kHz, NB=8kHz).
    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            isWAMR ?
16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// Configures the component for G.711 (a-law/mu-law) decoding. Encoding is
// not supported.
status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
    if (encoder) {
        return INVALID_OPERATION;
    }

    return setupRawAudioFormat(
            kPortIndexInput, sampleRate, numChannels);
}

// Configures the component for FLAC. For encoders, also applies the
// requested compression level (caller clamps to [0..8]).
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures a port for raw linear PCM with the given sample rate, channel
// count and encoding (8-bit unsigned, 16-bit signed, or 32-bit float).
// If the component rejects a non-16-bit encoding, retries with 16-bit.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // NOTE(review): returns OMX_ErrorNone (0) rather than a failure
        // status when the channel count cannot be mapped -- presumably a
        // deliberate best-effort bail-out; confirm before changing.
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err !=
OK && encoding != kAudioEncodingPcm16bit) {
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Puts the component into tunneled video playback mode (optionally synced to
// an audio HW sync source) and attaches the resulting sideband stream to the
// native window.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Selects a video port format matching the requested compression and color
// format by enumerating the component's supported formats (bounded by
// kMaxIndicesToCheck). A flexible-YUV color format request is substituted
// with the codec's concrete equivalent. Contains workarounds for TI
// components that report inconsistent enumeration entries.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
            && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
3023// 3024// Suggested preference order: 3025// - optimal format for texture rendering (mediaplayer behavior) 3026// - optimal SW readable & texture renderable format (flex-YUV support) 3027// - optimal SW readable non-renderable format (flex-YUV bytebuffer support) 3028// - legacy "usable" standard formats 3029// 3030// For legacy support, we prefer a standard format, but will settle for a SW readable 3031// flex-YUV format. 3032status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) { 3033 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat; 3034 InitOMXParams(&format); 3035 format.nPortIndex = kPortIndexOutput; 3036 3037 InitOMXParams(&legacyFormat); 3038 // this field will change when we find a suitable legacy format 3039 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused; 3040 3041 for (OMX_U32 index = 0; ; ++index) { 3042 format.nIndex = index; 3043 status_t err = mOMX->getParameter( 3044 mNode, OMX_IndexParamVideoPortFormat, 3045 &format, sizeof(format)); 3046 if (err != OK) { 3047 // no more formats, pick legacy format if found 3048 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) { 3049 memcpy(&format, &legacyFormat, sizeof(format)); 3050 break; 3051 } 3052 return err; 3053 } 3054 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) { 3055 return OMX_ErrorBadParameter; 3056 } 3057 if (!getLegacyFlexibleFormat) { 3058 break; 3059 } 3060 // standard formats that were exposed to users before 3061 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar 3062 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar 3063 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar 3064 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar 3065 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) { 3066 break; 3067 } 3068 // find best legacy non-standard format 3069 OMX_U32 flexibleEquivalent; 3070 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused 3071 && isFlexibleColorFormat( 3072 mOMX, mNode, 
format.eColorFormat, false /* usingNativeBuffers */, 3073 &flexibleEquivalent) 3074 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 3075 memcpy(&legacyFormat, &format, sizeof(format)); 3076 } 3077 } 3078 return mOMX->setParameter( 3079 mNode, OMX_IndexParamVideoPortFormat, 3080 &format, sizeof(format)); 3081} 3082 3083static const struct VideoCodingMapEntry { 3084 const char *mMime; 3085 OMX_VIDEO_CODINGTYPE mVideoCodingType; 3086} kVideoCodingMapEntry[] = { 3087 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 3088 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 3089 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 3090 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 3091 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 3092 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 3093 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 3094 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 3095}; 3096 3097static status_t GetVideoCodingTypeFromMime( 3098 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 3099 for (size_t i = 0; 3100 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3101 ++i) { 3102 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3103 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3104 return OK; 3105 } 3106 } 3107 3108 *codingType = OMX_VIDEO_CodingUnused; 3109 3110 return ERROR_UNSUPPORTED; 3111} 3112 3113static status_t GetMimeTypeForVideoCoding( 3114 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3115 for (size_t i = 0; 3116 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3117 ++i) { 3118 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3119 *mime = kVideoCodingMapEntry[i].mMime; 3120 return OK; 3121 } 3122 } 3123 3124 mime->clear(); 3125 3126 return ERROR_UNSUPPORTED; 3127} 3128 3129status_t ACodec::setupVideoDecoder( 3130 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow, 3131 bool usingSwRenderer, 
sp<AMessage> &outputFormat) { 3132 int32_t width, height; 3133 if (!msg->findInt32("width", &width) 3134 || !msg->findInt32("height", &height)) { 3135 return INVALID_OPERATION; 3136 } 3137 3138 OMX_VIDEO_CODINGTYPE compressionFormat; 3139 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3140 3141 if (err != OK) { 3142 return err; 3143 } 3144 3145 if (compressionFormat == OMX_VIDEO_CodingVP9) { 3146 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 3147 InitOMXParams(¶ms); 3148 params.nPortIndex = kPortIndexInput; 3149 // Check if VP9 decoder advertises supported profiles. 3150 params.nProfileIndex = 0; 3151 status_t err = mOMX->getParameter( 3152 mNode, 3153 OMX_IndexParamVideoProfileLevelQuerySupported, 3154 ¶ms, 3155 sizeof(params)); 3156 mIsLegacyVP9Decoder = err != OK; 3157 } 3158 3159 err = setVideoPortFormatType( 3160 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 3161 3162 if (err != OK) { 3163 return err; 3164 } 3165 3166 int32_t tmp; 3167 if (msg->findInt32("color-format", &tmp)) { 3168 OMX_COLOR_FORMATTYPE colorFormat = 3169 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3170 err = setVideoPortFormatType( 3171 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3172 if (err != OK) { 3173 ALOGW("[%s] does not support color format %d", 3174 mComponentName.c_str(), colorFormat); 3175 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3176 } 3177 } else { 3178 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3179 } 3180 3181 if (err != OK) { 3182 return err; 3183 } 3184 3185 int32_t frameRateInt; 3186 float frameRateFloat; 3187 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 3188 if (!msg->findInt32("frame-rate", &frameRateInt)) { 3189 frameRateInt = -1; 3190 } 3191 frameRateFloat = (float)frameRateInt; 3192 } 3193 3194 err = setVideoFormatOnPort( 3195 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 3196 3197 if (err != OK) { 3198 
return err; 3199 } 3200 3201 err = setVideoFormatOnPort( 3202 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 3203 3204 if (err != OK) { 3205 return err; 3206 } 3207 3208 err = setColorAspectsForVideoDecoder( 3209 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat); 3210 if (err == ERROR_UNSUPPORTED) { // support is optional 3211 err = OK; 3212 } 3213 3214 if (err != OK) { 3215 return err; 3216 } 3217 3218 err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat); 3219 if (err == ERROR_UNSUPPORTED) { // support is optional 3220 err = OK; 3221 } 3222 return err; 3223} 3224 3225status_t ACodec::initDescribeColorAspectsIndex() { 3226 status_t err = mOMX->getExtensionIndex( 3227 mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex); 3228 if (err != OK) { 3229 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0; 3230 } 3231 return err; 3232} 3233 3234status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) { 3235 status_t err = ERROR_UNSUPPORTED; 3236 if (mDescribeColorAspectsIndex) { 3237 err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3238 } 3239 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3240 mComponentName.c_str(), 3241 params.sAspects.mRange, asString(params.sAspects.mRange), 3242 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3243 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3244 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3245 err, asString(err)); 3246 3247 if (verify && err == OK) { 3248 err = getCodecColorAspects(params); 3249 } 3250 3251 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3252 "[%s] setting color aspects failed even though codec advertises support", 3253 mComponentName.c_str()); 3254 return err; 3255} 3256 3257status_t ACodec::setColorAspectsForVideoDecoder( 3258 int32_t width, int32_t height, bool 
usingNativeWindow, 3259 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3260 DescribeColorAspectsParams params; 3261 InitOMXParams(¶ms); 3262 params.nPortIndex = kPortIndexOutput; 3263 3264 getColorAspectsFromFormat(configFormat, params.sAspects); 3265 if (usingNativeWindow) { 3266 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3267 // The default aspects will be set back to the output format during the 3268 // getFormat phase of configure(). Set non-Unspecified values back into the 3269 // format, in case component does not support this enumeration. 3270 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3271 } 3272 3273 (void)initDescribeColorAspectsIndex(); 3274 3275 // communicate color aspects to codec 3276 return setCodecColorAspects(params); 3277} 3278 3279status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams ¶ms) { 3280 status_t err = ERROR_UNSUPPORTED; 3281 if (mDescribeColorAspectsIndex) { 3282 err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3283 } 3284 ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3285 mComponentName.c_str(), 3286 params.sAspects.mRange, asString(params.sAspects.mRange), 3287 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3288 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3289 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3290 err, asString(err)); 3291 if (params.bRequestingDataSpace) { 3292 ALOGV("for dataspace %#x", params.nDataSpace); 3293 } 3294 if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex 3295 && !params.bRequestingDataSpace && !params.bDataSpaceChanged) { 3296 ALOGW("[%s] getting color aspects failed even though codec advertises support", 3297 mComponentName.c_str()); 3298 } 3299 return err; 3300} 3301 3302status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) { 3303 DescribeColorAspectsParams params; 3304 
InitOMXParams(¶ms); 3305 params.nPortIndex = kPortIndexInput; 3306 status_t err = getCodecColorAspects(params); 3307 if (err == OK) { 3308 // we only set encoder input aspects if codec supports them 3309 setColorAspectsIntoFormat(params.sAspects, format, true /* force */); 3310 } 3311 return err; 3312} 3313 3314status_t ACodec::getDataSpace( 3315 DescribeColorAspectsParams ¶ms, android_dataspace *dataSpace /* nonnull */, 3316 bool tryCodec) { 3317 status_t err = OK; 3318 if (tryCodec) { 3319 // request dataspace guidance from codec. 3320 params.bRequestingDataSpace = OMX_TRUE; 3321 err = getCodecColorAspects(params); 3322 params.bRequestingDataSpace = OMX_FALSE; 3323 if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) { 3324 *dataSpace = (android_dataspace)params.nDataSpace; 3325 return err; 3326 } else if (err == ERROR_UNSUPPORTED) { 3327 // ignore not-implemented error for dataspace requests 3328 err = OK; 3329 } 3330 } 3331 3332 // this returns legacy versions if available 3333 *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */); 3334 ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 3335 "and dataspace %#x", 3336 mComponentName.c_str(), 3337 params.sAspects.mRange, asString(params.sAspects.mRange), 3338 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3339 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3340 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3341 *dataSpace); 3342 return err; 3343} 3344 3345 3346status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder( 3347 int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, 3348 android_dataspace *dataSpace) { 3349 DescribeColorAspectsParams params; 3350 InitOMXParams(¶ms); 3351 params.nPortIndex = kPortIndexOutput; 3352 3353 // reset default format and get resulting format 3354 getColorAspectsFromFormat(configFormat, params.sAspects); 3355 if 
(dataSpace != NULL) { 3356 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3357 } 3358 status_t err = setCodecColorAspects(params, true /* readBack */); 3359 3360 // we always set specified aspects for decoders 3361 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3362 3363 if (dataSpace != NULL) { 3364 status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */); 3365 if (err == OK) { 3366 err = res; 3367 } 3368 } 3369 3370 return err; 3371} 3372 3373// initial video encoder setup for bytebuffer mode 3374status_t ACodec::setColorAspectsForVideoEncoder( 3375 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3376 // copy config to output format as this is not exposed via getFormat 3377 copyColorConfig(configFormat, outputFormat); 3378 3379 DescribeColorAspectsParams params; 3380 InitOMXParams(¶ms); 3381 params.nPortIndex = kPortIndexInput; 3382 getColorAspectsFromFormat(configFormat, params.sAspects); 3383 3384 (void)initDescribeColorAspectsIndex(); 3385 3386 int32_t usingRecorder; 3387 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3388 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3389 int32_t width, height; 3390 if (configFormat->findInt32("width", &width) 3391 && configFormat->findInt32("height", &height)) { 3392 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3393 status_t err = getDataSpace( 3394 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3395 if (err != OK) { 3396 return err; 3397 } 3398 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3399 } 3400 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3401 } 3402 3403 // communicate color aspects to codec, but do not allow change of the platform aspects 3404 ColorAspects origAspects = params.sAspects; 3405 for (int triesLeft = 2; --triesLeft >= 0; ) { 3406 status_t err = setCodecColorAspects(params, true /* readBack 
*/); 3407 if (err != OK 3408 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3409 params.sAspects, origAspects, true /* usePlatformAspects */)) { 3410 return err; 3411 } 3412 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3413 mComponentName.c_str()); 3414 } 3415 return OK; 3416} 3417 3418status_t ACodec::setHDRStaticInfoForVideoCodec( 3419 OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3420 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3421 3422 DescribeHDRStaticInfoParams params; 3423 InitOMXParams(¶ms); 3424 params.nPortIndex = portIndex; 3425 3426 HDRStaticInfo *info = ¶ms.sInfo; 3427 if (getHDRStaticInfoFromFormat(configFormat, info)) { 3428 setHDRStaticInfoIntoFormat(params.sInfo, outputFormat); 3429 } 3430 3431 (void)initDescribeHDRStaticInfoIndex(); 3432 3433 // communicate HDR static Info to codec 3434 return setHDRStaticInfo(params); 3435} 3436 3437// subsequent initial video encoder setup for surface mode 3438status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3439 android_dataspace *dataSpace /* nonnull */) { 3440 DescribeColorAspectsParams params; 3441 InitOMXParams(¶ms); 3442 params.nPortIndex = kPortIndexInput; 3443 ColorAspects &aspects = params.sAspects; 3444 3445 // reset default format and store resulting format into both input and output formats 3446 getColorAspectsFromFormat(mConfigFormat, aspects); 3447 int32_t width, height; 3448 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3449 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3450 } 3451 setColorAspectsIntoFormat(aspects, mInputFormat); 3452 setColorAspectsIntoFormat(aspects, mOutputFormat); 3453 3454 // communicate color aspects to codec, but do not allow any change 3455 ColorAspects origAspects = aspects; 3456 status_t err = OK; 3457 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; 
) { 3458 status_t err = setCodecColorAspects(params, true /* readBack */); 3459 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3460 break; 3461 } 3462 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3463 mComponentName.c_str()); 3464 } 3465 3466 *dataSpace = HAL_DATASPACE_BT709; 3467 aspects = origAspects; // restore desired color aspects 3468 status_t res = getDataSpace( 3469 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3470 if (err == OK) { 3471 err = res; 3472 } 3473 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace); 3474 mInputFormat->setBuffer( 3475 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects))); 3476 3477 // update input format with codec supported color aspects (basically set unsupported 3478 // aspects to Unspecified) 3479 if (err == OK) { 3480 (void)getInputColorAspectsForVideoEncoder(mInputFormat); 3481 } 3482 3483 ALOGV("set default color aspects, updated input format to %s, output format to %s", 3484 mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str()); 3485 3486 return err; 3487} 3488 3489status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) { 3490 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3491 DescribeHDRStaticInfoParams params; 3492 InitOMXParams(¶ms); 3493 params.nPortIndex = portIndex; 3494 3495 status_t err = getHDRStaticInfo(params); 3496 if (err == OK) { 3497 // we only set decodec output HDRStaticInfo if codec supports them 3498 setHDRStaticInfoIntoFormat(params.sInfo, format); 3499 } 3500 return err; 3501} 3502 3503status_t ACodec::initDescribeHDRStaticInfoIndex() { 3504 status_t err = mOMX->getExtensionIndex( 3505 mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex); 3506 if (err != OK) { 3507 mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0; 3508 } 3509 return err; 3510} 
3511 3512status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams ¶ms) { 3513 status_t err = ERROR_UNSUPPORTED; 3514 if (mDescribeHDRStaticInfoIndex) { 3515 err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3516 } 3517 3518 const HDRStaticInfo *info = ¶ms.sInfo; 3519 ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, " 3520 "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)", 3521 mComponentName.c_str(), 3522 info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y, 3523 info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y, 3524 info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance, 3525 info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel); 3526 3527 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3528 "[%s] setting HDRStaticInfo failed even though codec advertises support", 3529 mComponentName.c_str()); 3530 return err; 3531} 3532 3533status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams ¶ms) { 3534 status_t err = ERROR_UNSUPPORTED; 3535 if (mDescribeHDRStaticInfoIndex) { 3536 err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3537 } 3538 3539 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3540 "[%s] getting HDRStaticInfo failed even though codec advertises support", 3541 mComponentName.c_str()); 3542 return err; 3543} 3544 3545status_t ACodec::setupVideoEncoder( 3546 const char *mime, const sp<AMessage> &msg, 3547 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3548 int32_t tmp; 3549 if (!msg->findInt32("color-format", &tmp)) { 3550 return INVALID_OPERATION; 3551 } 3552 3553 OMX_COLOR_FORMATTYPE colorFormat = 3554 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3555 3556 status_t err = setVideoPortFormatType( 3557 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3558 3559 if (err != OK) { 3560 ALOGE("[%s] does not support color 
format %d", 3561 mComponentName.c_str(), colorFormat); 3562 3563 return err; 3564 } 3565 3566 /* Input port configuration */ 3567 3568 OMX_PARAM_PORTDEFINITIONTYPE def; 3569 InitOMXParams(&def); 3570 3571 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3572 3573 def.nPortIndex = kPortIndexInput; 3574 3575 err = mOMX->getParameter( 3576 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3577 3578 if (err != OK) { 3579 return err; 3580 } 3581 3582 int32_t width, height, bitrate; 3583 if (!msg->findInt32("width", &width) 3584 || !msg->findInt32("height", &height) 3585 || !msg->findInt32("bitrate", &bitrate)) { 3586 return INVALID_OPERATION; 3587 } 3588 3589 video_def->nFrameWidth = width; 3590 video_def->nFrameHeight = height; 3591 3592 int32_t stride; 3593 if (!msg->findInt32("stride", &stride)) { 3594 stride = width; 3595 } 3596 3597 video_def->nStride = stride; 3598 3599 int32_t sliceHeight; 3600 if (!msg->findInt32("slice-height", &sliceHeight)) { 3601 sliceHeight = height; 3602 } 3603 3604 video_def->nSliceHeight = sliceHeight; 3605 3606 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3607 3608 float frameRate; 3609 if (!msg->findFloat("frame-rate", &frameRate)) { 3610 int32_t tmp; 3611 if (!msg->findInt32("frame-rate", &tmp)) { 3612 return INVALID_OPERATION; 3613 } 3614 frameRate = (float)tmp; 3615 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3616 } 3617 3618 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3619 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3620 // this is redundant as it was already set up in setVideoPortFormatType 3621 // FIXME for now skip this only for flexible YUV formats 3622 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3623 video_def->eColorFormat = colorFormat; 3624 } 3625 3626 err = mOMX->setParameter( 3627 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3628 3629 if (err != OK) { 3630 ALOGE("[%s] failed to set input port definition parameters.", 
3631 mComponentName.c_str()); 3632 3633 return err; 3634 } 3635 3636 /* Output port configuration */ 3637 3638 OMX_VIDEO_CODINGTYPE compressionFormat; 3639 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3640 3641 if (err != OK) { 3642 return err; 3643 } 3644 3645 err = setVideoPortFormatType( 3646 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3647 3648 if (err != OK) { 3649 ALOGE("[%s] does not support compression format %d", 3650 mComponentName.c_str(), compressionFormat); 3651 3652 return err; 3653 } 3654 3655 def.nPortIndex = kPortIndexOutput; 3656 3657 err = mOMX->getParameter( 3658 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3659 3660 if (err != OK) { 3661 return err; 3662 } 3663 3664 video_def->nFrameWidth = width; 3665 video_def->nFrameHeight = height; 3666 video_def->xFramerate = 0; 3667 video_def->nBitrate = bitrate; 3668 video_def->eCompressionFormat = compressionFormat; 3669 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3670 3671 err = mOMX->setParameter( 3672 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3673 3674 if (err != OK) { 3675 ALOGE("[%s] failed to set output port definition parameters.", 3676 mComponentName.c_str()); 3677 3678 return err; 3679 } 3680 3681 int32_t intraRefreshPeriod = 0; 3682 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3683 && intraRefreshPeriod >= 0) { 3684 err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3685 if (err != OK) { 3686 ALOGI("[%s] failed setIntraRefreshPeriod. 
Failure is fine since this key is optional", 3687 mComponentName.c_str()); 3688 err = OK; 3689 } 3690 } 3691 3692 switch (compressionFormat) { 3693 case OMX_VIDEO_CodingMPEG4: 3694 err = setupMPEG4EncoderParameters(msg); 3695 break; 3696 3697 case OMX_VIDEO_CodingH263: 3698 err = setupH263EncoderParameters(msg); 3699 break; 3700 3701 case OMX_VIDEO_CodingAVC: 3702 err = setupAVCEncoderParameters(msg); 3703 break; 3704 3705 case OMX_VIDEO_CodingHEVC: 3706 err = setupHEVCEncoderParameters(msg); 3707 break; 3708 3709 case OMX_VIDEO_CodingVP8: 3710 case OMX_VIDEO_CodingVP9: 3711 err = setupVPXEncoderParameters(msg); 3712 break; 3713 3714 default: 3715 break; 3716 } 3717 3718 // Set up color aspects on input, but propagate them to the output format, as they will 3719 // not be read back from encoder. 3720 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3721 if (err == ERROR_UNSUPPORTED) { 3722 ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str()); 3723 err = OK; 3724 } 3725 3726 if (err != OK) { 3727 return err; 3728 } 3729 3730 err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat); 3731 if (err == ERROR_UNSUPPORTED) { // support is optional 3732 ALOGI("[%s] cannot encode HDR static metadata. 
Ignoring.", mComponentName.c_str()); 3733 err = OK; 3734 } 3735 3736 if (err == OK) { 3737 ALOGI("setupVideoEncoder succeeded"); 3738 } 3739 3740 return err; 3741} 3742 3743status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3744 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3745 InitOMXParams(¶ms); 3746 params.nPortIndex = kPortIndexOutput; 3747 3748 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3749 3750 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3751 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3752 int32_t mbs; 3753 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3754 return INVALID_OPERATION; 3755 } 3756 params.nCirMBs = mbs; 3757 } 3758 3759 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3760 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3761 int32_t mbs; 3762 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3763 return INVALID_OPERATION; 3764 } 3765 params.nAirMBs = mbs; 3766 3767 int32_t ref; 3768 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3769 return INVALID_OPERATION; 3770 } 3771 params.nAirRef = ref; 3772 } 3773 3774 status_t err = mOMX->setParameter( 3775 mNode, OMX_IndexParamVideoIntraRefresh, 3776 ¶ms, sizeof(params)); 3777 return err; 3778} 3779 3780static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) { 3781 if (iFramesInterval < 0) { 3782 return 0xFFFFFFFF; 3783 } else if (iFramesInterval == 0) { 3784 return 0; 3785 } 3786 OMX_U32 ret = frameRate * iFramesInterval; 3787 return ret; 3788} 3789 3790static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 3791 int32_t tmp; 3792 if (!msg->findInt32("bitrate-mode", &tmp)) { 3793 return OMX_Video_ControlRateVariable; 3794 } 3795 3796 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 3797} 3798 3799status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 3800 int32_t bitrate, iFrameInterval; 3801 if (!msg->findInt32("bitrate", 
&bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as either a float or an int32; accept both.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    // Read-modify-write: fetch the component's defaults before overriding.
    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    // Zero P-frame spacing degenerates to an all-I-frame (all-sync) stream.
    if (mpeg4type.nPFrames == 0) {
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nBFrames = 0;
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    // Profile/level are optional; if a profile is given, a level is required
    // and the pair must be advertised by the component.
    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// Configures the OMX component for H.263 encoding: keyframe spacing,
// optional profile/level, then bitrate and error-correction settings.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as either a float or an int32; accept both.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    // Zero P-frame spacing degenerates to an all-I-frame stream.
    if (h263type.nPFrames == 0) {
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    h263type.nBFrames = 0;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// static
// Picks the minimum AVC level that can carry the given picture size, frame
// rate and bitrate, using the per-level limits of H.264 Annex A.
int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
        int width, int height, int rate, int bitrate,
        OMX_VIDEO_AVCPROFILETYPE profile) {
    // convert bitrate to main/baseline profile kbps equivalent
    switch (profile) {
        case OMX_VIDEO_AVCProfileHigh10:
            bitrate = divUp(bitrate, 3000); break;
        case OMX_VIDEO_AVCProfileHigh:
            bitrate = divUp(bitrate, 1250); break;
        default:
            bitrate = divUp(bitrate, 1000); break;
    }

    // convert size and rate to MBs
    width = divUp(width, 16);
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;
    int maxDimension = max(width, height);

    static const int limits[][5] = {
        /*   MBps      MB  dim  bitrate        level */
        {    1485,     99,  28,     64, OMX_VIDEO_AVCLevel1  },
        {    1485,     99,  28,    128, OMX_VIDEO_AVCLevel1b },
        {    3000,    396,  56,    192, OMX_VIDEO_AVCLevel11 },
        {    6000,    396,  56,    384, OMX_VIDEO_AVCLevel12 },
        {   11880,    396,  56,    768, OMX_VIDEO_AVCLevel13 },
        {   11880,    396,  56,   2000, OMX_VIDEO_AVCLevel2  },
        {   19800,    792,  79,   4000, OMX_VIDEO_AVCLevel21 },
        {   20250,   1620, 113,   4000, OMX_VIDEO_AVCLevel22 },
        {   40500,   1620, 113,  10000, OMX_VIDEO_AVCLevel3  },
        {  108000,   3600, 169,  14000, OMX_VIDEO_AVCLevel31 },
        {  216000,   5120, 202,  20000, OMX_VIDEO_AVCLevel32 },
        {  245760,   8192, 256,  20000, OMX_VIDEO_AVCLevel4  },
        {  245760,   8192, 256,  50000, OMX_VIDEO_AVCLevel41 },
        {  522240,   8704, 263,  50000, OMX_VIDEO_AVCLevel42 },
        {  589824,  22080, 420, 135000, OMX_VIDEO_AVCLevel5  },
        {  983040,  36864, 543, 240000, OMX_VIDEO_AVCLevel51 },
        { 2073600,  36864, 543, 240000, OMX_VIDEO_AVCLevel52 },
    };

    // Return the first (lowest) level whose limits accommodate the request.
    for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
3999 const int (&limit)[5] = limits[i]; 4000 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 4001 && bitrate <= limit[3]) { 4002 return limit[4]; 4003 } 4004 } 4005 return 0; 4006} 4007 4008status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 4009 int32_t bitrate, iFrameInterval; 4010 if (!msg->findInt32("bitrate", &bitrate) 4011 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4012 return INVALID_OPERATION; 4013 } 4014 4015 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4016 4017 float frameRate; 4018 if (!msg->findFloat("frame-rate", &frameRate)) { 4019 int32_t tmp; 4020 if (!msg->findInt32("frame-rate", &tmp)) { 4021 return INVALID_OPERATION; 4022 } 4023 frameRate = (float)tmp; 4024 } 4025 4026 status_t err = OK; 4027 int32_t intraRefreshMode = 0; 4028 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 4029 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 4030 if (err != OK) { 4031 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4032 err, intraRefreshMode); 4033 return err; 4034 } 4035 } 4036 4037 OMX_VIDEO_PARAM_AVCTYPE h264type; 4038 InitOMXParams(&h264type); 4039 h264type.nPortIndex = kPortIndexOutput; 4040 4041 err = mOMX->getParameter( 4042 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4043 4044 if (err != OK) { 4045 return err; 4046 } 4047 4048 h264type.nAllowedPictureTypes = 4049 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4050 4051 int32_t profile; 4052 if (msg->findInt32("profile", &profile)) { 4053 int32_t level; 4054 if (!msg->findInt32("level", &level)) { 4055 return INVALID_OPERATION; 4056 } 4057 4058 err = verifySupportForProfileAndLevel(profile, level); 4059 4060 if (err != OK) { 4061 return err; 4062 } 4063 4064 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4065 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4066 } else { 4067 // Use baseline profile for AVC recording if profile is not 
specified. 4068 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4069 } 4070 4071 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4072 asString(h264type.eProfile), asString(h264type.eLevel)); 4073 4074 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4075 h264type.nSliceHeaderSpacing = 0; 4076 h264type.bUseHadamard = OMX_TRUE; 4077 h264type.nRefFrames = 1; 4078 h264type.nBFrames = 0; 4079 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4080 if (h264type.nPFrames == 0) { 4081 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4082 } 4083 h264type.nRefIdx10ActiveMinus1 = 0; 4084 h264type.nRefIdx11ActiveMinus1 = 0; 4085 h264type.bEntropyCodingCABAC = OMX_FALSE; 4086 h264type.bWeightedPPrediction = OMX_FALSE; 4087 h264type.bconstIpred = OMX_FALSE; 4088 h264type.bDirect8x8Inference = OMX_FALSE; 4089 h264type.bDirectSpatialTemporal = OMX_FALSE; 4090 h264type.nCabacInitIdc = 0; 4091 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4092 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4093 h264type.nSliceHeaderSpacing = 0; 4094 h264type.bUseHadamard = OMX_TRUE; 4095 h264type.nRefFrames = 2; 4096 h264type.nBFrames = 1; 4097 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4098 h264type.nAllowedPictureTypes = 4099 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4100 h264type.nRefIdx10ActiveMinus1 = 0; 4101 h264type.nRefIdx11ActiveMinus1 = 0; 4102 h264type.bEntropyCodingCABAC = OMX_TRUE; 4103 h264type.bWeightedPPrediction = OMX_TRUE; 4104 h264type.bconstIpred = OMX_TRUE; 4105 h264type.bDirect8x8Inference = OMX_TRUE; 4106 h264type.bDirectSpatialTemporal = OMX_TRUE; 4107 h264type.nCabacInitIdc = 1; 4108 } 4109 4110 if (h264type.nBFrames != 0) { 4111 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4112 } 4113 4114 h264type.bEnableUEP = OMX_FALSE; 4115 h264type.bEnableFMO = OMX_FALSE; 4116 h264type.bEnableASO = OMX_FALSE; 4117 h264type.bEnableRS = OMX_FALSE; 4118 
h264type.bFrameMBsOnly = OMX_TRUE; 4119 h264type.bMBAFF = OMX_FALSE; 4120 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4121 4122 err = mOMX->setParameter( 4123 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4124 4125 if (err != OK) { 4126 return err; 4127 } 4128 4129 return configureBitrate(bitrate, bitrateMode); 4130} 4131 4132status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4133 int32_t bitrate, iFrameInterval; 4134 if (!msg->findInt32("bitrate", &bitrate) 4135 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4136 return INVALID_OPERATION; 4137 } 4138 4139 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4140 4141 float frameRate; 4142 if (!msg->findFloat("frame-rate", &frameRate)) { 4143 int32_t tmp; 4144 if (!msg->findInt32("frame-rate", &tmp)) { 4145 return INVALID_OPERATION; 4146 } 4147 frameRate = (float)tmp; 4148 } 4149 4150 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4151 InitOMXParams(&hevcType); 4152 hevcType.nPortIndex = kPortIndexOutput; 4153 4154 status_t err = OK; 4155 err = mOMX->getParameter( 4156 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4157 if (err != OK) { 4158 return err; 4159 } 4160 4161 int32_t profile; 4162 if (msg->findInt32("profile", &profile)) { 4163 int32_t level; 4164 if (!msg->findInt32("level", &level)) { 4165 return INVALID_OPERATION; 4166 } 4167 4168 err = verifySupportForProfileAndLevel(profile, level); 4169 if (err != OK) { 4170 return err; 4171 } 4172 4173 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4174 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4175 } 4176 // TODO: finer control? 
    hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate);

    err = mOMX->setParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
    if (err != OK) {
        return err;
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Configures VP8 encoder extensions: keyframe interval, WebRTC temporal
// layering and per-layer bitrate split, then the base bitrate parameters.
status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    int32_t iFrameInterval = 0;
    size_t tsLayers = 0;
    OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern =
        OMX_VIDEO_VPXTemporalLayerPatternNone;
    // Cumulative bitrate allocation per layer count, in percent of total.
    static const uint32_t kVp8LayerRateAlloction
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = {
        {100, 100, 100},  // 1 layer
        { 60, 100, 100},  // 2 layers {60%, 40%}
        { 40,  60, 100},  // 3 layers {40%, 20%, 40%}
    };
    if (!msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }
    // i-frame-interval is optional for VPX.
    msg->findInt32("i-frame-interval", &iFrameInterval);

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as either a float or an int32; accept both.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    AString tsSchema;
    if (msg->findString("ts-schema", &tsSchema)) {
        if (tsSchema == "webrtc.vp8.1-layer") {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            tsLayers = 1;
        } else if (tsSchema == "webrtc.vp8.2-layer") {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            tsLayers = 2;
        } else if (tsSchema == "webrtc.vp8.3-layer") {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            tsLayers = 3;
        } else {
            ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str());
        }
    }

    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
    InitOMXParams(&vp8type);
    vp8type.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
            &vp8type, sizeof(vp8type));

    // The extended VP8 parameter is optional; only apply it if supported.
    if (err == OK) {
        if (iFrameInterval > 0) {
            vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate);
        }
        vp8type.eTemporalPattern = pattern;
        vp8type.nTemporalLayerCount = tsLayers;
        if (tsLayers > 0) {
            for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) {
                vp8type.nTemporalLayerBitrateRatio[i] =
                    kVp8LayerRateAlloction[tsLayers - 1][i];
            }
        }
        if (bitrateMode == OMX_Video_ControlRateConstant) {
            vp8type.nMinQuantizer = 2;
            vp8type.nMaxQuantizer = 63;
        }

        err = mOMX->setParameter(
                mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                &vp8type, sizeof(vp8type));
        if (err != OK) {
            // Best-effort: fall through to the base bitrate setup anyway.
            ALOGW("Extended VP8 parameters set failed: %d", err);
        }
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Enumerates the component's advertised profile/level pairs and succeeds if
// the requested profile is supported at the requested level or higher.
status_t ACodec::verifySupportForProfileAndLevel(
        int32_t profile, int32_t level) {
    OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // Bounded scan: stop after kMaxIndicesToCheck entries to avoid looping
    // forever on components that keep returning entries.
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        params.nProfileIndex = index;
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
                sizeof(params));

        if (err != OK) {
            return err;
        }

        int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
        int32_t supportedLevel = static_cast<int32_t>(params.eLevel);

        if (profile == supportedProfile && level <= supportedLevel) {
            return OK;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                    mComponentName.c_str(), index,
                    params.eProfile, params.eLevel);
        }
    }
    return ERROR_UNSUPPORTED;
}

// Applies the rate-control mode and target bitrate on the output port.
status_t ACodec::configureBitrate(
        int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
    InitOMXParams(&bitrateType);
    bitrateType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));

    if (err != OK) {
        return err;
    }

    bitrateType.eControlRate = bitrateMode;
    bitrateType.nTargetBitrate = bitrate;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));
}

// Enables resync markers and disables the remaining MPEG-4/H.263 error
// resilience tools on components that support error correction.
status_t ACodec::setupErrorCorrectionParameters() {
    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
    InitOMXParams(&errorCorrectionType);
    errorCorrectionType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));

    if (err != OK) {
        return OK;  // Optional feature. Ignore this failure
    }

    errorCorrectionType.bEnableHEC = OMX_FALSE;
    errorCorrectionType.bEnableResync = OMX_TRUE;
    errorCorrectionType.nResynchMarkerSpacing = 256;
    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
    errorCorrectionType.bEnableRVLC = OMX_FALSE;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));
}

// Sets frame dimensions (and, for the input port, compression format and
// frame rate) in the port definition.
status_t ACodec::setVideoFormatOnPort(
        OMX_U32 portIndex,
        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
        float frameRate) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (portIndex == kPortIndexInput) {
        // XXX Need a (much) better heuristic to compute input buffer sizes.
        const size_t X = 64 * 1024;
        if (def.nBufferSize < X) {
            def.nBufferSize = X;
        }
    }

    if (def.eDomain != OMX_PortDomainVideo) {
        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
        return FAILED_TRANSACTION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    if (portIndex == kPortIndexInput) {
        video_def->eCompressionFormat = compressionFormat;
        video_def->eColorFormat = OMX_COLOR_FormatUnused;
        if (frameRate >= 0) {
            // OMX xFramerate is Q16 fixed point.
            video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
        }
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    return err;
}

// Tells the component whether output graphic buffers come from a native
// window (surface) or not.
status_t ACodec::initNativeWindow() {
    if (mNativeWindow != NULL) {
        return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
    }

    // No surface: disable native buffer use; a failure here is ignored.
    mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
    return OK;
}

// Counts buffers on the given port currently held by the OMX component.
size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            ++n;
        }
    }

    return n;
}

// Counts output buffers currently held by the native window.
size_t ACodec::countBuffersOwnedByNativeWindow() const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ++n;
        }
    }

    return n;
}

// Dequeues buffers back from the native window until only the minimum
// undequeued buffer count remains there.
void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
    if (mNativeWindow == NULL) {
        return;
    }

    while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
            && dequeueBufferFromNativeWindow() != NULL) {
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}

// Returns true if every buffer on the port is held either by us or by the
// native window, i.e. none is still with the component or the app.
bool ACodec::allYourBuffersAreBelongToUs(
        OMX_U32 portIndex) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mStatus != BufferInfo::OWNED_BY_US
                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("[%s] Buffer %u on port %u still has status %d",
                    mComponentName.c_str(),
                    info->mBufferID, portIndex, info->mStatus);
            return false;
        }
    }

    return true;
}

// Convenience overload: checks both the input and the output port.
bool ACodec::allYourBuffersAreBelongToUs() {
    return allYourBuffersAreBelongToUs(kPortIndexInput)
        && allYourBuffersAreBelongToUs(kPortIndexOutput);
}

// Queues a message for redelivery after the current state transition.
void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}

// Redelivers all deferred messages in order. The queue is swapped out
// first, so redelivered handlers may safely defer messages again.
void ACodec::processDeferredMessages() {
    List<sp<AMessage> > queue = mDeferredQueue;
    mDeferredQueue.clear();

    List<sp<AMessage> >::iterator it = queue.begin();
    while (it != queue.end()) {
        onMessageReceived(*it++);
    }
}

// static
// Fills in a MediaImage2 plane description for the common 8-bit YUV420
// color formats, used when the component does not implement the
// describeColorFormat extension.
bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // Gralloc YV12: 16-byte-aligned strides, V plane before U.
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            // I420-style planar layout: Y, then U, then V.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            // Unreachable: filtered by the format check above.
            TRESPASS();
    }
    return true;
}

// static
// Asks the component to describe a color format via the v1 extension, then
// the v2 extension, finally falling back to the default YUV420 description.
bool ACodec::describeColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        DescribeColorFormat2Params &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat",
            &describeColorFormatIndex) == OK) {
        DescribeColorFormatParams describeParamsV1(describeParams);
        if (omx->getParameter(
                node, describeColorFormatIndex,
                &describeParamsV1, sizeof(describeParamsV1)) == OK) {
            // Upconvert the V1 (8-bit) description into a MediaImage2.
            describeParams.initFromV1(describeParamsV1);
            return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        }
    } else if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
            && omx->getParameter(
                    node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
        return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    }

    return describeDefaultColorFormat(describeParams);
}

// static
// Returns true — and reports the flexible equivalent via
// *flexibleEquivalent — if the given color format is an 8-bit YUV 4:2:0
// layout usable through the flexible-YUV API.
bool ACodec::isFlexibleColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
    DescribeColorFormat2Params describeParams;
    InitOMXParams(&describeParams);
    describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
    // reasonable dummy values
    describeParams.nFrameWidth = 128;
    describeParams.nFrameHeight = 128;
    describeParams.nStride = 128;
    describeParams.nSliceHeight = 128;
    describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;

    CHECK(flexibleEquivalent != NULL);

    if (!describeColorFormat(omx, node, describeParams)) {
        return false;
    }

    const MediaImage2 &img = describeParams.sMediaImage;
    if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
        if (img.mNumPlanes != 3
                || img.mPlane[img.Y].mHorizSubsampling != 1
                || img.mPlane[img.Y].mVertSubsampling != 1) {
            return false;
        }

        // YUV 420
        if (img.mPlane[img.U].mHorizSubsampling == 2
                && img.mPlane[img.U].mVertSubsampling == 2
                && img.mPlane[img.V].mHorizSubsampling == 2
                && img.mPlane[img.V].mVertSubsampling == 2) {
            // possible flexible YUV420 format
            if (img.mBitDepth <= 8) {
                *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
                return true;
            }
        }
    }
    return false;
}

// Translates the current port definition into message-format fields (mime,
// dimensions, color/crop info, audio parameters, ...) for the client.
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
    const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) {
        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
        return BAD_VALUE;
    }

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // Raw video: decoder output or encoder input only.
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    if (mNativeWindow == NULL) {
                        DescribeColorFormat2Params describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (describeColorFormat(mOMX, mNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        // If the component exposes no crop config, default to
                        // the full frame.
                        if (mOMX->getConfig(
                                    mNode,
                                    (portIndex == kPortIndexOutput ?
                                            OMX_IndexConfigCommonOutputCrop :
                                            OMX_IndexConfigCommonInputCrop),
                                    &rect, sizeof(rect)) != OK) {
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        // Reject crop rectangles extending past the frame.
                        if (rect.nLeft < 0 ||
                            rect.nTop < 0 ||
                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
                                    rect.nLeft, rect.nTop,
                                    rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        // "crop" is stored as inclusive left/top/right/bottom.
                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                        (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
                    } else {
                        (void)getInputColorAspectsForVideoEncoder(notify);
                        if (mConfigFormat->contains("hdr-static-info")) {
                            (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
                        }
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        // Report the temporal-layer schema in the same string
                        // form the client uses to configure it.
                        AString tsSchema = "none";
                        if (vp8type.eTemporalPattern
                                == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
                            switch (vp8type.nTemporalLayerCount) {
                                case 1:
                                {
                                    tsSchema = "webrtc.vp8.1-layer";
                                    break;
                                }
                                case 2:
                                {
                                    tsSchema = "webrtc.vp8.2-layer";
                                    break;
                                }
                                case 3:
                                {
                                    tsSchema = "webrtc.vp8.3-layer";
                                    break;
                                }
                                default:
                                {
                                    break;
                                }
                            }
                        }
                        notify->setString("ts-schema", tsSchema);
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
                        // should be CodingUnused
                        ALOGE("Raw port video compression format is %s(%d)",
                                asString(videoDef->eCompressionFormat),
                                videoDef->eCompressionFormat);
                        return BAD_VALUE;
                    }
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                                videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    uint32_t intraRefreshPeriod = 0;
                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
                            && intraRefreshPeriod > 0) {
                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // Only interleaved (or mono) linear PCM is supported.
                    if (params.nChannels <= 0
                            || (params.nChannels != 1 && !params.bInterleaved)
                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
                        ALOGE("unsupported PCM port: %u channels%s, %u-bit",
                                params.nChannels,
                                params.bInterleaved ? " interleaved" : "",
                                params.nBitPerSample);
                        return FAILED_TRANSACTION;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);

                    // Map OMX numerical-data/bit-depth to an AudioEncoding;
                    // anything other than u8 / f32 / s16 is rejected.
                    AudioEncoding encoding = kAudioEncodingPcm16bit;
                    if (params.eNumData == OMX_NumericalDataUnsigned
                            && params.nBitPerSample == 8u) {
                        encoding = kAudioEncodingPcm8bit;
                    } else if (params.eNumData == OMX_NumericalDataFloat
                            && params.nBitPerSample == 32u) {
                        encoding = kAudioEncodingPcmFloat;
                    } else if (params.nBitPerSample != 16u
                            || params.eNumData != OMX_NumericalDataSigned) {
                        ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ",
                                asString(params.eNumData), params.eNumData,
                                asString(params.ePCMMode), params.ePCMMode);
                        return FAILED_TRANSACTION;
                    }
                    notify->setInt32("pcm-encoding", encoding);

                    if (mChannelMaskPresent) {
                        notify->setInt32("channel-mask", mChannelMask);
                    }
                    break;
                }

                case OMX_AUDIO_CodingAAC:
                {
                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAMR:
                {
                    OMX_AUDIO_PARAM_AMRTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setInt32("channel-count", 1);
                    // Band mode distinguishes wideband (16 kHz) from
                    // narrowband (8 kHz) AMR.
                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
                        notify->setInt32("sample-rate", 16000);
                    } else {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
                        notify->setInt32("sample-rate", 8000);
                    }
                    break;
                }

                case OMX_AUDIO_CodingFLAC:
                {
                    OMX_AUDIO_PARAM_FLACTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingMP3:
                {
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingVORBIS:
                {
                    OMX_AUDIO_PARAM_VORBISTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidEAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidOPUS:
                {
                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingG711:
                {
                    // G.711 is carried through the PCM parameter struct;
                    // the PCM mode selects mu-law vs A-law vs linear.
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    const char *mime = NULL;
                    if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
                    } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
                    } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
                        mime = MEDIA_MIMETYPE_AUDIO_RAW;
                    }
                    notify->setString("mime", mime);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate",
params.nSamplingRate); 5083 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5084 break; 5085 } 5086 5087 case OMX_AUDIO_CodingGSMFR: 5088 { 5089 OMX_AUDIO_PARAM_PCMMODETYPE params; 5090 InitOMXParams(¶ms); 5091 params.nPortIndex = portIndex; 5092 5093 err = mOMX->getParameter( 5094 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5095 if (err != OK) { 5096 return err; 5097 } 5098 5099 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5100 notify->setInt32("channel-count", params.nChannels); 5101 notify->setInt32("sample-rate", params.nSamplingRate); 5102 break; 5103 } 5104 5105 default: 5106 ALOGE("Unsupported audio coding: %s(%d)\n", 5107 asString(audioDef->eEncoding), audioDef->eEncoding); 5108 return BAD_TYPE; 5109 } 5110 break; 5111 } 5112 5113 default: 5114 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5115 return BAD_TYPE; 5116 } 5117 5118 return OK; 5119} 5120 5121void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5122 // aspects are normally communicated in ColorAspects 5123 int32_t range, standard, transfer; 5124 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5125 5126 // if some aspects are unspecified, use dataspace fields 5127 if (range != 0) { 5128 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5129 } 5130 if (standard != 0) { 5131 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5132 } 5133 if (transfer != 0) { 5134 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5135 } 5136 5137 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5138 if (range != 0) { 5139 mOutputFormat->setInt32("color-range", range); 5140 } 5141 if (standard != 0) { 5142 mOutputFormat->setInt32("color-standard", standard); 5143 } 5144 if (transfer != 0) { 5145 mOutputFormat->setInt32("color-transfer", transfer); 5146 } 5147 5148 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5149 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5150 dataSpace, 5151 aspects.mRange, asString(aspects.mRange), 5152 aspects.mPrimaries, asString(aspects.mPrimaries), 5153 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5154 aspects.mTransfer, asString(aspects.mTransfer), 5155 range, asString((ColorRange)range), 5156 standard, asString((ColorStandard)standard), 5157 transfer, asString((ColorTransfer)transfer)); 5158} 5159 5160void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5161 // store new output format, at the same time mark that this is no longer the first frame 5162 mOutputFormat = mBaseOutputFormat->dup(); 5163 5164 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5165 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5166 return; 5167 } 5168 5169 if (expectedFormat != NULL) { 5170 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5171 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5172 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5173 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5174 mComponentName.c_str(), 5175 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5176 } 5177 } 5178 5179 if (!mIsVideo && !mIsEncoder) { 5180 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5181 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5182 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5183 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5184 5185 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5186 if (mConverter[kPortIndexOutput] != NULL) { 5187 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5188 } 5189 } 5190 5191 if (mTunneled) { 5192 sendFormatChange(); 5193 } 5194} 5195 5196void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
    // For raw video going to a native window, piggy-back crop and dataspace
    // changes onto the render-buffer notification so the renderer applies
    // them in sync with this frame.
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
        // notify renderer of the crop change and dataspace change
        // NOTE: native window uses extended right-bottom coordinate
        int32_t left, top, right, bottom;
        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
            notify->setRect("crop", left, top, right + 1, bottom + 1);
        }

        int32_t dataSpace;
        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
            notify->setInt32("dataspace", dataSpace);
        }
    }
}

// Posts kWhatOutputFormatChanged carrying the current output format. For raw
// audio with encoder delay/padding, first rescales delay/padding to the
// (possibly new) sample rate and (re)creates the SkipCutBuffer that trims
// those samples from decoded output.
void ACodec::sendFormatChange() {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) {
        int32_t channelCount, sampleRate;
        CHECK(mOutputFormat->findInt32("channel-count", &channelCount));
        CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate));
        if (mSampleRate != 0 && sampleRate != 0) {
            // delay/padding are frame counts; rescale when the rate changed
            mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate;
            mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate;
            mSampleRate = sampleRate;
        }
        if (mSkipCutBuffer != NULL) {
            size_t prevbufsize = mSkipCutBuffer->size();
            if (prevbufsize != 0) {
                // the buffer being replaced still held undelivered data
                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
            }
        }
        mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatOutputFormatChanged);
    notify->setMessage("format", mOutputFormat);
    notify->post();

    // mLastOutputFormat is not used when tunneled; doing this just to stay consistent
    mLastOutputFormat = mOutputFormat;
}

// Reports a fatal error to the client, deriving a better status_t from the
// OMX error when the caller passed UNKNOWN_ERROR.
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatError);
    ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);

    if (internalError == UNKNOWN_ERROR) { // find better error code
        const status_t omxStatus = statusFromOMXError(error);
        if (omxStatus != 0) {
            internalError = omxStatus;
        } else {
            ALOGW("Invalid OMX error %#x", error);
        }
    }

    // once set, the codec refuses further work (checked elsewhere in ACodec)
    mFatalError = true;

    notify->setInt32("err", internalError);
    notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
    notify->post();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::PortDescription::PortDescription() {
}

// Asks an encoder to emit an IDR (sync) frame as soon as possible.
// Returns ERROR_UNSUPPORTED for decoders.
status_t ACodec::requestIDRFrame() {
    if (!mIsEncoder) {
        return ERROR_UNSUPPORTED;
    }

    OMX_CONFIG_INTRAREFRESHVOPTYPE params;
    InitOMXParams(&params);

    params.nPortIndex = kPortIndexOutput;
    params.IntraRefreshVOP = OMX_TRUE;

    return mOMX->setConfig(
            mNode,
            OMX_IndexConfigVideoIntraVOPRefresh,
            &params,
            sizeof(params));
}

// The four vectors below are parallel arrays kept in lock-step: addBuffer
// appends one entry to each, so bufferIDAt/bufferAt/handleAt/memRefAt all
// share the same index space.
void ACodec::PortDescription::addBuffer(
        IOMX::buffer_id id, const sp<ABuffer> &buffer,
        const sp<NativeHandle> &handle, const sp<RefBase> &memRef) {
    mBufferIDs.push_back(id);
    mBuffers.push_back(buffer);
    mHandles.push_back(handle);
    mMemRefs.push_back(memRef);
}

size_t ACodec::PortDescription::countBuffers() {
    return mBufferIDs.size();
}

IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
    return mBufferIDs.itemAt(index);
}

sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
    return mBuffers.itemAt(index);
}

sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const {
    return mHandles.itemAt(index);
}

sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const {
    return mMemRefs.itemAt(index);
}

////////////////////////////////////////////////////////////////////////////////

ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
    : AState(parentState),
      mCodec(codec) {
}

// Default port policy: hold on to returned buffers. Concrete states override
// this to resubmit or free them.
ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
        OMX_U32 /* portIndex */) {
    return KEEP_BUFFERS;
}

// Message dispatch shared by all states; returns false for messages the
// concrete state must handle itself.
bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatInputBufferFilled:
        {
            onInputBufferFilled(msg);
            break;
        }

        case kWhatOutputBufferDrained:
        {
            onOutputBufferDrained(msg);
            break;
        }

        case ACodec::kWhatOMXMessageList:
        {
            return checkOMXMessage(msg) ? onOMXMessageList(msg) : true;
        }

        case ACodec::kWhatOMXMessageItem:
        {
            // no need to check as we already did it for kWhatOMXMessageList
            return onOMXMessage(msg);
        }

        case ACodec::kWhatOMXMessage:
        {
            return checkOMXMessage(msg) ? onOMXMessage(msg) : true;
        }

        case ACodec::kWhatSetSurface:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            sp<RefBase> obj;
            CHECK(msg->findObject("surface", &obj));

            status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        case ACodec::kWhatSetInputSurface:
        case ACodec::kWhatSignalEndOfInputStream:
        {
            // This may result in an app illegal state exception.
5382 ALOGE("Message 0x%x was not handled", msg->what()); 5383 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5384 return true; 5385 } 5386 5387 case ACodec::kWhatOMXDied: 5388 { 5389 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5390 ALOGE("OMX/mediaserver died, signalling error!"); 5391 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5392 break; 5393 } 5394 5395 case ACodec::kWhatReleaseCodecInstance: 5396 { 5397 ALOGI("[%s] forcing the release of codec", 5398 mCodec->mComponentName.c_str()); 5399 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5400 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5401 mCodec->mComponentName.c_str(), err); 5402 sp<AMessage> notify = mCodec->mNotify->dup(); 5403 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5404 notify->post(); 5405 break; 5406 } 5407 5408 default: 5409 return false; 5410 } 5411 5412 return true; 5413} 5414 5415bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5416 // there is a possibility that this is an outstanding message for a 5417 // codec that we have already destroyed 5418 if (mCodec->mNode == 0) { 5419 ALOGI("ignoring message as already freed component: %s", 5420 msg->debugString().c_str()); 5421 return false; 5422 } 5423 5424 IOMX::node_id nodeID; 5425 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5426 if (nodeID != mCodec->mNode) { 5427 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5428 return false; 5429 } 5430 return true; 5431} 5432 5433bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5434 sp<RefBase> obj; 5435 CHECK(msg->findObject("messages", &obj)); 5436 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5437 5438 bool receivedRenderedEvents = false; 5439 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5440 it != msgList->getList().cend(); ++it) { 5441 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 
5442 mCodec->handleMessage(*it); 5443 int32_t type; 5444 CHECK((*it)->findInt32("type", &type)); 5445 if (type == omx_message::FRAME_RENDERED) { 5446 receivedRenderedEvents = true; 5447 } 5448 } 5449 5450 if (receivedRenderedEvents) { 5451 // NOTE: all buffers are rendered in this case 5452 mCodec->notifyOfRenderedFrames(); 5453 } 5454 return true; 5455} 5456 5457bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5458 int32_t type; 5459 CHECK(msg->findInt32("type", &type)); 5460 5461 switch (type) { 5462 case omx_message::EVENT: 5463 { 5464 int32_t event, data1, data2; 5465 CHECK(msg->findInt32("event", &event)); 5466 CHECK(msg->findInt32("data1", &data1)); 5467 CHECK(msg->findInt32("data2", &data2)); 5468 5469 if (event == OMX_EventCmdComplete 5470 && data1 == OMX_CommandFlush 5471 && data2 == (int32_t)OMX_ALL) { 5472 // Use of this notification is not consistent across 5473 // implementations. We'll drop this notification and rely 5474 // on flush-complete notifications on the individual port 5475 // indices instead. 
                return true;
            }

            return onOMXEvent(
                    static_cast<OMX_EVENTTYPE>(event),
                    static_cast<OMX_U32>(data1),
                    static_cast<OMX_U32>(data2));
        }

        case omx_message::EMPTY_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            int32_t fenceFd;

            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXEmptyBufferDone(bufferID, fenceFd);
        }

        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));

            int32_t rangeOffset, rangeLength, flags, fenceFd;
            int64_t timeUs;

            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    fenceFd);
        }

        case omx_message::FRAME_RENDERED:
        {
            int64_t mediaTimeUs, systemNano;

            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
            CHECK(msg->findInt64("system_nano", &systemNano));

            return onOMXFrameRendered(
                    mediaTimeUs, systemNano);
        }

        default:
            ALOGE("Unexpected message type: %d", type);
            return false;
    }
}

bool ACodec::BaseState::onOMXFrameRendered(
        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
    // ignore outside of Executing and PortSettingsChanged states
    return true;
}

// Handles generic OMX events: unpacks dataspace changes, logs non-error
// events (returning false so the concrete state can act on them), and
// converts OMX errors into a fatal client-visible error.
bool ACodec::BaseState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    if (event == OMX_EventDataSpaceChanged) {
        // data2 packs one aspect per byte: range|primaries|matrix|transfer
        // from MSB to LSB (mirrors the unpacking below)
        ColorAspects aspects;
        aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
        aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF);
        aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF);
        aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF);

        mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
        return true;
    }

    if (event != OMX_EventError) {
        ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
             mCodec->mComponentName.c_str(), event, data1, data2);

        return false;
    }

    ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);

    // verify OMX component sends back an error we expect.
    OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
    if (!isOMXError(omxError)) {
        ALOGW("Invalid OMX error %#x", omxError);
        omxError = OMX_ErrorUndefined;
    }
    mCodec->signalError(omxError);

    return true;
}

// The component returned an input buffer to us (EmptyBufferDone). Reclaims
// ownership, waits on any fence (input buffers cannot carry fences onward),
// and in RESUBMIT mode immediately offers the buffer back to the client.
bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
    ALOGV("[%s] onOMXEmptyBufferDone %u",
         mCodec->mComponentName.c_str(), bufferID);

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        if (fenceFd >= 0) {
            // don't leak the fence fd on the error path
            ::close(fenceFd);
        }
        return false;
    }
    info->mStatus = BufferInfo::OWNED_BY_US;

    // input buffers cannot take fences, so wait for any fence now
    (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
    fenceFd = -1;

    // still save fence for completeness
    info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");

    // We're in "store-metadata-in-buffers" mode, the underlying
    // OMX component had access to data that's implicitly refcounted
    // by this "MediaBuffer" object.
    // Now that the OMX component has
    // told us that it's done with the input buffer, we can decrement
    // the mediaBuffer's reference count.
    info->mData->setMediaBufferBase(NULL);

    PortMode mode = getPortMode(kPortIndexInput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
            postFillThisBuffer(info);
            break;

        case FREE_BUFFERS:
        default:
            // NOTE(review): message says "output" but this handler is for the
            // input port — wording only, behavior unaffected.
            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
            return false;
    }

    return true;
}

// Offers an input buffer we own to the client (kWhatFillThisBuffer); the
// client answers with kWhatInputBufferFilled. No-op once input EOS was seen.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
    notify->setInt32("buffer-id", info->mBufferID);

    // wipe stale metadata before handing the buffer upstream
    info->mData->meta()->clear();
    notify->setBuffer("buffer", info->mData);

    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
    reply->setInt32("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

// The client returned an input buffer — possibly filled, possibly carrying
// EOS or an error. Depending on the port mode, either holds the buffer
// (flush) or converts/submits it to the component via emptyBuffer().
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<ABuffer> buffer;
    int32_t err = OK;
    bool eos = false;
    PortMode mode = getPortMode(kPortIndexInput);

    if (!msg->findBuffer("buffer", &buffer)) {
        /* these are unfilled buffers returned by client */
        CHECK(msg->findInt32("err", &err));

        if (err == OK) {
            /* buffers with no errors are returned on MediaCodec.flush */
            mode = KEEP_BUFFERS;
        } else {
            ALOGV("[%s] saw error %d instead of an input buffer",
                 mCodec->mComponentName.c_str(), err);
            eos = true;
        }

        buffer.clear();
    }

    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                MetadataBufferType metaType = mCodec->mInputMetadataType;
                int32_t isCSD = 0;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    if (mCodec->mIsLegacyVP9Decoder) {
                        ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                        postFillThisBuffer(info);
                        break;
                    }
                    // CSD goes through as a plain buffer even in metadata mode
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                    metaType = kMetadataBufferTypeInvalid;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                if (buffer != info->mCodecData) {
                    // client buffer differs from the codec's backing buffer;
                    // convert (or plain-copy for CSD) into the codec buffer
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                         mCodec->mComponentName.c_str(),
                         bufferID,
                         buffer.get(), info->mCodecData.get());

                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL || isCSD) {
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                         mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                         mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");

                // for metadata-mode input, refresh the handle/graphic buffer
                // reference the component will dereference
                status_t err2 = OK;
                switch (metaType) {
                case kMetadataBufferTypeInvalid:
                    break;
#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                case kMetadataBufferTypeNativeHandleSource:
                    if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
                        VideoNativeHandleMetadata *vnhmd =
                            (VideoNativeHandleMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateNativeHandleInMeta(
                                mCodec->mNode, kPortIndexInput,
                                NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */),
                                bufferID);
                    }
                    break;
                case kMetadataBufferTypeANWBuffer:
                    if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
                        VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateGraphicBufferInMeta(
                                mCodec->mNode, kPortIndexInput,
                                new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */),
                                bufferID);
                    }
                    break;
#endif
                default:
                    ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode",
                            asString(metaType), info->mCodecData->size(),
                            sizeof(buffer_handle_t) * 8);
                    err2 = ERROR_UNSUPPORTED;
                    break;
                }

                if (err2 == OK) {
                    err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        info->mCodecData->size(),
                        flags,
                        timeUs,
                        info->mFenceFd);
                }
                // fence ownership passed to emptyBuffer (or consumed)
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                         mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // no buffer came back (error path) — submit an empty EOS
                // buffer so the component drains and signals output EOS
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                         mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                         mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                     mCodec->mComponentName.c_str(), bufferID);

                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        0,
                        OMX_BUFFERFLAG_EOS,
                        0,
                        info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// Finds an input buffer we still own and offers it to the client. Note that
// the scan keeps the LAST eligible buffer, and is skipped entirely after
// input EOS.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

// The component produced an output buffer (FillBufferDone). Reclaims
// ownership and, in RESUBMIT mode, either recycles empty non-EOS buffers
// back to the component or forwards the data downstream
// (kWhatDrainThisBuffer), emitting format-change and EOS notifications
// along the way.
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
         mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;
    status_t err= OK;

#if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        if (fenceFd >= 0) {
            // don't leak the fence fd on the error path
            ::close(fenceFd);
        }
        return true;
    }

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus = BufferInfo::OWNED_BY_US;

    if (info->mRenderInfo != NULL) {
        // The fence for an emptied buffer must have signaled, but there still could be queued
        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
        // as we will soon requeue this buffer to the surface. While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // empty buffer without (first) EOS: hand it straight back
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                     mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                native_handle_t *handle = NULL;
                VideoNativeHandleMetadata &nativeMeta =
                    *(VideoNativeHandleMetadata *)info->mData->data();
                if (info->mData->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // handle is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pHandle;
#endif
                }
                info->mData->meta()->setPointer("handle", handle);
                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                info->mData->meta()->setInt32("rangeLength", rangeLength);
            } else if (info->mData == info->mCodecData) {
                info->mData->setRange(rangeOffset, rangeLength);
            } else {
                info->mCodecData->setRange(rangeOffset, rangeLength);
                // in this case we know that mConverter is not null
                status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                        info->mCodecData, info->mData);
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                mCodec->mSkipCutBuffer->submit(info->mData);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setBuffer("buffer", info->mData);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
6076 } 6077 break; 6078 } 6079 6080 case FREE_BUFFERS: 6081 err = mCodec->freeBuffer(kPortIndexOutput, index); 6082 if (err != OK) { 6083 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6084 return true; 6085 } 6086 break; 6087 6088 default: 6089 ALOGE("Invalid port mode: %d", mode); 6090 return false; 6091 } 6092 6093 return true; 6094} 6095 6096void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { 6097 IOMX::buffer_id bufferID; 6098 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 6099 ssize_t index; 6100 BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 6101 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 6102 if (status != BufferInfo::OWNED_BY_DOWNSTREAM) { 6103 ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 6104 mCodec->dumpBuffers(kPortIndexOutput); 6105 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6106 return; 6107 } 6108 6109 android_native_rect_t crop; 6110 if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) { 6111 status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop); 6112 ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err); 6113 } 6114 6115 int32_t dataSpace; 6116 if (msg->findInt32("dataspace", &dataSpace)) { 6117 status_t err = native_window_set_buffers_data_space( 6118 mCodec->mNativeWindow.get(), (android_dataspace)dataSpace); 6119 ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err); 6120 } 6121 6122 int32_t render; 6123 if (mCodec->mNativeWindow != NULL 6124 && msg->findInt32("render", &render) && render != 0 6125 && info->mData != NULL && info->mData->size() != 0) { 6126 ATRACE_NAME("render"); 6127 // The client wants this buffer to be rendered. 
 6128
6129         // save buffers sent to the surface so we can get render time when they return
6130         int64_t mediaTimeUs = -1;
6131         info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
6132         if (mediaTimeUs >= 0) {
6133             mCodec->mRenderTracker.onFrameQueued(
6134                     mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
6135         }
6136
6137         int64_t timestampNs = 0;
6138         if (!msg->findInt64("timestampNs", &timestampNs)) {
6139             // use media timestamp if client did not request a specific render timestamp
6140             if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
6141                 ALOGV("using buffer PTS of %lld", (long long)timestampNs);
6142                 timestampNs *= 1000;
6143             }
6144         }
6145
6146         status_t err;
6147         err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
6148         ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);
6149
6150         info->checkReadFence("onOutputBufferDrained before queueBuffer");
6151         err = mCodec->mNativeWindow->queueBuffer(
6152                 mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
6153         info->mFenceFd = -1;
6154         if (err == OK) {
6155             info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
6156         } else {
6157             ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
6158             mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
6159             info->mStatus = BufferInfo::OWNED_BY_US;
6160             // keeping read fence as write fence to avoid clobbering
6161             info->mIsReadFence = false;
6162         }
6163     } else {
6164         if (mCodec->mNativeWindow != NULL &&
6165                 (info->mData == NULL || info->mData->size() != 0)) {
6166             // move read fence into write fence to avoid clobbering
6167             info->mIsReadFence = false;
6168             ATRACE_NAME("frame-drop");
6169         }
6170         info->mStatus = BufferInfo::OWNED_BY_US;
6171     }
6172
6173     PortMode mode = getPortMode(kPortIndexOutput);
6174
6175     switch (mode) {
6176         case KEEP_BUFFERS:
6177         {
6178             // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?
6179 6180 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6181 // We cannot resubmit the buffer we just rendered, dequeue 6182 // the spare instead. 6183 6184 info = mCodec->dequeueBufferFromNativeWindow(); 6185 } 6186 break; 6187 } 6188 6189 case RESUBMIT_BUFFERS: 6190 { 6191 if (!mCodec->mPortEOS[kPortIndexOutput]) { 6192 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6193 // We cannot resubmit the buffer we just rendered, dequeue 6194 // the spare instead. 6195 6196 info = mCodec->dequeueBufferFromNativeWindow(); 6197 } 6198 6199 if (info != NULL) { 6200 ALOGV("[%s] calling fillBuffer %u", 6201 mCodec->mComponentName.c_str(), info->mBufferID); 6202 info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS"); 6203 status_t err = mCodec->mOMX->fillBuffer( 6204 mCodec->mNode, info->mBufferID, info->mFenceFd); 6205 info->mFenceFd = -1; 6206 if (err == OK) { 6207 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6208 } else { 6209 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6210 } 6211 } 6212 } 6213 break; 6214 } 6215 6216 case FREE_BUFFERS: 6217 { 6218 status_t err = mCodec->freeBuffer(kPortIndexOutput, index); 6219 if (err != OK) { 6220 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6221 } 6222 break; 6223 } 6224 6225 default: 6226 ALOGE("Invalid port mode: %d", mode); 6227 return; 6228 } 6229} 6230 6231//////////////////////////////////////////////////////////////////////////////// 6232 6233ACodec::UninitializedState::UninitializedState(ACodec *codec) 6234 : BaseState(codec) { 6235} 6236 6237void ACodec::UninitializedState::stateEntered() { 6238 ALOGV("Now uninitialized"); 6239 6240 if (mDeathNotifier != NULL) { 6241 mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier); 6242 mDeathNotifier.clear(); 6243 } 6244 6245 mCodec->mUsingNativeWindow = false; 6246 mCodec->mNativeWindow.clear(); 6247 mCodec->mNativeWindowUsageBits = 0; 6248 mCodec->mNode = 0; 6249 mCodec->mOMX.clear(); 6250 
mCodec->mQuirks = 0; 6251 mCodec->mFlags = 0; 6252 mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; 6253 mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; 6254 mCodec->mConverter[0].clear(); 6255 mCodec->mConverter[1].clear(); 6256 mCodec->mComponentName.clear(); 6257} 6258 6259bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 6260 bool handled = false; 6261 6262 switch (msg->what()) { 6263 case ACodec::kWhatSetup: 6264 { 6265 onSetup(msg); 6266 6267 handled = true; 6268 break; 6269 } 6270 6271 case ACodec::kWhatAllocateComponent: 6272 { 6273 onAllocateComponent(msg); 6274 handled = true; 6275 break; 6276 } 6277 6278 case ACodec::kWhatShutdown: 6279 { 6280 int32_t keepComponentAllocated; 6281 CHECK(msg->findInt32( 6282 "keepComponentAllocated", &keepComponentAllocated)); 6283 ALOGW_IF(keepComponentAllocated, 6284 "cannot keep component allocated on shutdown in Uninitialized state"); 6285 6286 sp<AMessage> notify = mCodec->mNotify->dup(); 6287 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6288 notify->post(); 6289 6290 handled = true; 6291 break; 6292 } 6293 6294 case ACodec::kWhatFlush: 6295 { 6296 sp<AMessage> notify = mCodec->mNotify->dup(); 6297 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6298 notify->post(); 6299 6300 handled = true; 6301 break; 6302 } 6303 6304 case ACodec::kWhatReleaseCodecInstance: 6305 { 6306 // nothing to do, as we have already signaled shutdown 6307 handled = true; 6308 break; 6309 } 6310 6311 default: 6312 return BaseState::onMessageReceived(msg); 6313 } 6314 6315 return handled; 6316} 6317 6318void ACodec::UninitializedState::onSetup( 6319 const sp<AMessage> &msg) { 6320 if (onAllocateComponent(msg) 6321 && mCodec->mLoadedState->onConfigureComponent(msg)) { 6322 mCodec->mLoadedState->onStart(); 6323 } 6324} 6325 6326bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { 6327 ALOGV("onAllocateComponent"); 6328 6329 CHECK(mCodec->mNode == 0); 
6330 6331 OMXClient client; 6332 if (client.connect() != OK) { 6333 mCodec->signalError(OMX_ErrorUndefined, NO_INIT); 6334 return false; 6335 } 6336 6337 sp<IOMX> omx = client.interface(); 6338 6339 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec); 6340 6341 Vector<AString> matchingCodecs; 6342 6343 AString mime; 6344 6345 AString componentName; 6346 uint32_t quirks = 0; 6347 int32_t encoder = false; 6348 if (msg->findString("componentName", &componentName)) { 6349 sp<IMediaCodecList> list = MediaCodecList::getInstance(); 6350 if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) { 6351 matchingCodecs.add(componentName); 6352 } 6353 } else { 6354 CHECK(msg->findString("mime", &mime)); 6355 6356 if (!msg->findInt32("encoder", &encoder)) { 6357 encoder = false; 6358 } 6359 6360 MediaCodecList::findMatchingCodecs( 6361 mime.c_str(), 6362 encoder, // createEncoder 6363 0, // flags 6364 &matchingCodecs); 6365 } 6366 6367 sp<CodecObserver> observer = new CodecObserver; 6368 IOMX::node_id node = 0; 6369 6370 status_t err = NAME_NOT_FOUND; 6371 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size(); 6372 ++matchIndex) { 6373 componentName = matchingCodecs[matchIndex]; 6374 quirks = MediaCodecList::getQuirksFor(componentName.c_str()); 6375 6376 pid_t tid = gettid(); 6377 int prevPriority = androidGetThreadPriority(tid); 6378 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); 6379 err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node); 6380 androidSetThreadPriority(tid, prevPriority); 6381 6382 if (err == OK) { 6383 break; 6384 } else { 6385 ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str()); 6386 } 6387 6388 node = 0; 6389 } 6390 6391 if (node == 0) { 6392 if (!mime.empty()) { 6393 ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.", 6394 encoder ? 
"en" : "de", mime.c_str(), err); 6395 } else { 6396 ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err); 6397 } 6398 6399 mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err)); 6400 return false; 6401 } 6402 6403 mDeathNotifier = new DeathNotifier(notify); 6404 if (mCodec->mNodeBinder == NULL || 6405 mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) { 6406 // This was a local binder, if it dies so do we, we won't care 6407 // about any notifications in the afterlife. 6408 mDeathNotifier.clear(); 6409 } 6410 6411 notify = new AMessage(kWhatOMXMessageList, mCodec); 6412 observer->setNotificationMessage(notify); 6413 6414 mCodec->mComponentName = componentName; 6415 mCodec->mRenderTracker.setComponentName(componentName); 6416 mCodec->mFlags = 0; 6417 6418 if (componentName.endsWith(".secure")) { 6419 mCodec->mFlags |= kFlagIsSecure; 6420 mCodec->mFlags |= kFlagIsGrallocUsageProtected; 6421 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 6422 } 6423 6424 mCodec->mQuirks = quirks; 6425 mCodec->mOMX = omx; 6426 mCodec->mNode = node; 6427 6428 { 6429 sp<AMessage> notify = mCodec->mNotify->dup(); 6430 notify->setInt32("what", CodecBase::kWhatComponentAllocated); 6431 notify->setString("componentName", mCodec->mComponentName.c_str()); 6432 notify->post(); 6433 } 6434 6435 mCodec->changeState(mCodec->mLoadedState); 6436 6437 return true; 6438} 6439 6440//////////////////////////////////////////////////////////////////////////////// 6441 6442ACodec::LoadedState::LoadedState(ACodec *codec) 6443 : BaseState(codec) { 6444} 6445 6446void ACodec::LoadedState::stateEntered() { 6447 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); 6448 6449 mCodec->mPortEOS[kPortIndexInput] = 6450 mCodec->mPortEOS[kPortIndexOutput] = false; 6451 6452 mCodec->mInputEOSResult = OK; 6453 6454 mCodec->mDequeueCounter = 0; 6455 mCodec->mMetadataBuffersToSubmit = 0; 6456 mCodec->mRepeatFrameDelayUs = -1ll; 6457 
mCodec->mInputFormat.clear(); 6458 mCodec->mOutputFormat.clear(); 6459 mCodec->mBaseOutputFormat.clear(); 6460 6461 if (mCodec->mShutdownInProgress) { 6462 bool keepComponentAllocated = mCodec->mKeepComponentAllocated; 6463 6464 mCodec->mShutdownInProgress = false; 6465 mCodec->mKeepComponentAllocated = false; 6466 6467 onShutdown(keepComponentAllocated); 6468 } 6469 mCodec->mExplicitShutdown = false; 6470 6471 mCodec->processDeferredMessages(); 6472} 6473 6474void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { 6475 if (!keepComponentAllocated) { 6476 (void)mCodec->mOMX->freeNode(mCodec->mNode); 6477 6478 mCodec->changeState(mCodec->mUninitializedState); 6479 } 6480 6481 if (mCodec->mExplicitShutdown) { 6482 sp<AMessage> notify = mCodec->mNotify->dup(); 6483 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6484 notify->post(); 6485 mCodec->mExplicitShutdown = false; 6486 } 6487} 6488 6489bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { 6490 bool handled = false; 6491 6492 switch (msg->what()) { 6493 case ACodec::kWhatConfigureComponent: 6494 { 6495 onConfigureComponent(msg); 6496 handled = true; 6497 break; 6498 } 6499 6500 case ACodec::kWhatCreateInputSurface: 6501 { 6502 onCreateInputSurface(msg); 6503 handled = true; 6504 break; 6505 } 6506 6507 case ACodec::kWhatSetInputSurface: 6508 { 6509 onSetInputSurface(msg); 6510 handled = true; 6511 break; 6512 } 6513 6514 case ACodec::kWhatStart: 6515 { 6516 onStart(); 6517 handled = true; 6518 break; 6519 } 6520 6521 case ACodec::kWhatShutdown: 6522 { 6523 int32_t keepComponentAllocated; 6524 CHECK(msg->findInt32( 6525 "keepComponentAllocated", &keepComponentAllocated)); 6526 6527 mCodec->mExplicitShutdown = true; 6528 onShutdown(keepComponentAllocated); 6529 6530 handled = true; 6531 break; 6532 } 6533 6534 case ACodec::kWhatFlush: 6535 { 6536 sp<AMessage> notify = mCodec->mNotify->dup(); 6537 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6538 
notify->post(); 6539 6540 handled = true; 6541 break; 6542 } 6543 6544 default: 6545 return BaseState::onMessageReceived(msg); 6546 } 6547 6548 return handled; 6549} 6550 6551bool ACodec::LoadedState::onConfigureComponent( 6552 const sp<AMessage> &msg) { 6553 ALOGV("onConfigureComponent"); 6554 6555 CHECK(mCodec->mNode != 0); 6556 6557 status_t err = OK; 6558 AString mime; 6559 if (!msg->findString("mime", &mime)) { 6560 err = BAD_VALUE; 6561 } else { 6562 err = mCodec->configureCodec(mime.c_str(), msg); 6563 } 6564 if (err != OK) { 6565 ALOGE("[%s] configureCodec returning error %d", 6566 mCodec->mComponentName.c_str(), err); 6567 6568 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6569 return false; 6570 } 6571 6572 { 6573 sp<AMessage> notify = mCodec->mNotify->dup(); 6574 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 6575 notify->setMessage("input-format", mCodec->mInputFormat); 6576 notify->setMessage("output-format", mCodec->mOutputFormat); 6577 notify->post(); 6578 } 6579 6580 return true; 6581} 6582 6583status_t ACodec::LoadedState::setupInputSurface() { 6584 status_t err = OK; 6585 6586 if (mCodec->mRepeatFrameDelayUs > 0ll) { 6587 err = mCodec->mOMX->setInternalOption( 6588 mCodec->mNode, 6589 kPortIndexInput, 6590 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 6591 &mCodec->mRepeatFrameDelayUs, 6592 sizeof(mCodec->mRepeatFrameDelayUs)); 6593 6594 if (err != OK) { 6595 ALOGE("[%s] Unable to configure option to repeat previous " 6596 "frames (err %d)", 6597 mCodec->mComponentName.c_str(), 6598 err); 6599 return err; 6600 } 6601 } 6602 6603 if (mCodec->mMaxPtsGapUs > 0ll) { 6604 err = mCodec->mOMX->setInternalOption( 6605 mCodec->mNode, 6606 kPortIndexInput, 6607 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 6608 &mCodec->mMaxPtsGapUs, 6609 sizeof(mCodec->mMaxPtsGapUs)); 6610 6611 if (err != OK) { 6612 ALOGE("[%s] Unable to configure max timestamp gap (err %d)", 6613 mCodec->mComponentName.c_str(), 6614 err); 6615 
return err; 6616 } 6617 } 6618 6619 if (mCodec->mMaxFps > 0) { 6620 err = mCodec->mOMX->setInternalOption( 6621 mCodec->mNode, 6622 kPortIndexInput, 6623 IOMX::INTERNAL_OPTION_MAX_FPS, 6624 &mCodec->mMaxFps, 6625 sizeof(mCodec->mMaxFps)); 6626 6627 if (err != OK) { 6628 ALOGE("[%s] Unable to configure max fps (err %d)", 6629 mCodec->mComponentName.c_str(), 6630 err); 6631 return err; 6632 } 6633 } 6634 6635 if (mCodec->mTimePerCaptureUs > 0ll 6636 && mCodec->mTimePerFrameUs > 0ll) { 6637 int64_t timeLapse[2]; 6638 timeLapse[0] = mCodec->mTimePerFrameUs; 6639 timeLapse[1] = mCodec->mTimePerCaptureUs; 6640 err = mCodec->mOMX->setInternalOption( 6641 mCodec->mNode, 6642 kPortIndexInput, 6643 IOMX::INTERNAL_OPTION_TIME_LAPSE, 6644 &timeLapse[0], 6645 sizeof(timeLapse)); 6646 6647 if (err != OK) { 6648 ALOGE("[%s] Unable to configure time lapse (err %d)", 6649 mCodec->mComponentName.c_str(), 6650 err); 6651 return err; 6652 } 6653 } 6654 6655 if (mCodec->mCreateInputBuffersSuspended) { 6656 bool suspend = true; 6657 err = mCodec->mOMX->setInternalOption( 6658 mCodec->mNode, 6659 kPortIndexInput, 6660 IOMX::INTERNAL_OPTION_SUSPEND, 6661 &suspend, 6662 sizeof(suspend)); 6663 6664 if (err != OK) { 6665 ALOGE("[%s] Unable to configure option to suspend (err %d)", 6666 mCodec->mComponentName.c_str(), 6667 err); 6668 return err; 6669 } 6670 } 6671 6672 uint32_t usageBits; 6673 if (mCodec->mOMX->getParameter( 6674 mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 6675 &usageBits, sizeof(usageBits)) == OK) { 6676 mCodec->mInputFormat->setInt32( 6677 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 6678 } 6679 6680 sp<ABuffer> colorAspectsBuffer; 6681 if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) { 6682 err = mCodec->mOMX->setInternalOption( 6683 mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS, 6684 colorAspectsBuffer->base(), colorAspectsBuffer->capacity()); 6685 if (err != OK) { 6686 
ALOGE("[%s] Unable to configure color aspects (err %d)", 6687 mCodec->mComponentName.c_str(), err); 6688 return err; 6689 } 6690 } 6691 return OK; 6692} 6693 6694void ACodec::LoadedState::onCreateInputSurface( 6695 const sp<AMessage> & /* msg */) { 6696 ALOGV("onCreateInputSurface"); 6697 6698 sp<AMessage> notify = mCodec->mNotify->dup(); 6699 notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); 6700 6701 android_dataspace dataSpace; 6702 status_t err = 6703 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6704 notify->setMessage("input-format", mCodec->mInputFormat); 6705 notify->setMessage("output-format", mCodec->mOutputFormat); 6706 6707 sp<IGraphicBufferProducer> bufferProducer; 6708 if (err == OK) { 6709 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6710 err = mCodec->mOMX->createInputSurface( 6711 mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, 6712 &mCodec->mInputMetadataType); 6713 // framework uses ANW buffers internally instead of gralloc handles 6714 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6715 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6716 } 6717 } 6718 6719 if (err == OK) { 6720 err = setupInputSurface(); 6721 } 6722 6723 if (err == OK) { 6724 notify->setObject("input-surface", 6725 new BufferProducerWrapper(bufferProducer)); 6726 } else { 6727 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6728 // the error through because it's in the "configured" state. We 6729 // send a kWhatInputSurfaceCreated with an error value instead. 
6730 ALOGE("[%s] onCreateInputSurface returning error %d", 6731 mCodec->mComponentName.c_str(), err); 6732 notify->setInt32("err", err); 6733 } 6734 notify->post(); 6735} 6736 6737void ACodec::LoadedState::onSetInputSurface( 6738 const sp<AMessage> &msg) { 6739 ALOGV("onSetInputSurface"); 6740 6741 sp<AMessage> notify = mCodec->mNotify->dup(); 6742 notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted); 6743 6744 sp<RefBase> obj; 6745 CHECK(msg->findObject("input-surface", &obj)); 6746 sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get()); 6747 6748 android_dataspace dataSpace; 6749 status_t err = 6750 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6751 notify->setMessage("input-format", mCodec->mInputFormat); 6752 notify->setMessage("output-format", mCodec->mOutputFormat); 6753 6754 if (err == OK) { 6755 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6756 err = mCodec->mOMX->setInputSurface( 6757 mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(), 6758 &mCodec->mInputMetadataType); 6759 // framework uses ANW buffers internally instead of gralloc handles 6760 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6761 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6762 } 6763 } 6764 6765 if (err == OK) { 6766 surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace); 6767 err = setupInputSurface(); 6768 } 6769 6770 if (err != OK) { 6771 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6772 // the error through because it's in the "configured" state. We 6773 // send a kWhatInputSurfaceAccepted with an error value instead. 
6774 ALOGE("[%s] onSetInputSurface returning error %d", 6775 mCodec->mComponentName.c_str(), err); 6776 notify->setInt32("err", err); 6777 } 6778 notify->post(); 6779} 6780 6781void ACodec::LoadedState::onStart() { 6782 ALOGV("onStart"); 6783 6784 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 6785 if (err != OK) { 6786 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6787 } else { 6788 mCodec->changeState(mCodec->mLoadedToIdleState); 6789 } 6790} 6791 6792//////////////////////////////////////////////////////////////////////////////// 6793 6794ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec) 6795 : BaseState(codec) { 6796} 6797 6798void ACodec::LoadedToIdleState::stateEntered() { 6799 ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str()); 6800 6801 status_t err; 6802 if ((err = allocateBuffers()) != OK) { 6803 ALOGE("Failed to allocate buffers after transitioning to IDLE state " 6804 "(error 0x%08x)", 6805 err); 6806 6807 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6808 6809 mCodec->mOMX->sendCommand( 6810 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 6811 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) { 6812 mCodec->freeBuffersOnPort(kPortIndexInput); 6813 } 6814 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) { 6815 mCodec->freeBuffersOnPort(kPortIndexOutput); 6816 } 6817 6818 mCodec->changeState(mCodec->mLoadedState); 6819 } 6820} 6821 6822status_t ACodec::LoadedToIdleState::allocateBuffers() { 6823 status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput); 6824 6825 if (err != OK) { 6826 return err; 6827 } 6828 6829 return mCodec->allocateBuffersOnPort(kPortIndexOutput); 6830} 6831 6832bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { 6833 switch (msg->what()) { 6834 case kWhatSetParameters: 6835 case kWhatShutdown: 6836 { 6837 mCodec->deferMessage(msg); 6838 return true; 6839 } 6840 6841 
case kWhatSignalEndOfInputStream: 6842 { 6843 mCodec->onSignalEndOfInputStream(); 6844 return true; 6845 } 6846 6847 case kWhatResume: 6848 { 6849 // We'll be active soon enough. 6850 return true; 6851 } 6852 6853 case kWhatFlush: 6854 { 6855 // We haven't even started yet, so we're flushed alright... 6856 sp<AMessage> notify = mCodec->mNotify->dup(); 6857 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6858 notify->post(); 6859 return true; 6860 } 6861 6862 default: 6863 return BaseState::onMessageReceived(msg); 6864 } 6865} 6866 6867bool ACodec::LoadedToIdleState::onOMXEvent( 6868 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6869 switch (event) { 6870 case OMX_EventCmdComplete: 6871 { 6872 status_t err = OK; 6873 if (data1 != (OMX_U32)OMX_CommandStateSet 6874 || data2 != (OMX_U32)OMX_StateIdle) { 6875 ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)", 6876 asString((OMX_COMMANDTYPE)data1), data1, 6877 asString((OMX_STATETYPE)data2), data2); 6878 err = FAILED_TRANSACTION; 6879 } 6880 6881 if (err == OK) { 6882 err = mCodec->mOMX->sendCommand( 6883 mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting); 6884 } 6885 6886 if (err != OK) { 6887 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6888 } else { 6889 mCodec->changeState(mCodec->mIdleToExecutingState); 6890 } 6891 6892 return true; 6893 } 6894 6895 default: 6896 return BaseState::onOMXEvent(event, data1, data2); 6897 } 6898} 6899 6900//////////////////////////////////////////////////////////////////////////////// 6901 6902ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec) 6903 : BaseState(codec) { 6904} 6905 6906void ACodec::IdleToExecutingState::stateEntered() { 6907 ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str()); 6908} 6909 6910bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6911 switch (msg->what()) { 6912 case kWhatSetParameters: 6913 case kWhatShutdown: 6914 { 6915 
mCodec->deferMessage(msg); 6916 return true; 6917 } 6918 6919 case kWhatResume: 6920 { 6921 // We'll be active soon enough. 6922 return true; 6923 } 6924 6925 case kWhatFlush: 6926 { 6927 // We haven't even started yet, so we're flushed alright... 6928 sp<AMessage> notify = mCodec->mNotify->dup(); 6929 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6930 notify->post(); 6931 6932 return true; 6933 } 6934 6935 case kWhatSignalEndOfInputStream: 6936 { 6937 mCodec->onSignalEndOfInputStream(); 6938 return true; 6939 } 6940 6941 default: 6942 return BaseState::onMessageReceived(msg); 6943 } 6944} 6945 6946bool ACodec::IdleToExecutingState::onOMXEvent( 6947 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6948 switch (event) { 6949 case OMX_EventCmdComplete: 6950 { 6951 if (data1 != (OMX_U32)OMX_CommandStateSet 6952 || data2 != (OMX_U32)OMX_StateExecuting) { 6953 ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)", 6954 asString((OMX_COMMANDTYPE)data1), data1, 6955 asString((OMX_STATETYPE)data2), data2); 6956 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6957 return true; 6958 } 6959 6960 mCodec->mExecutingState->resume(); 6961 mCodec->changeState(mCodec->mExecutingState); 6962 6963 return true; 6964 } 6965 6966 default: 6967 return BaseState::onOMXEvent(event, data1, data2); 6968 } 6969} 6970 6971//////////////////////////////////////////////////////////////////////////////// 6972 6973ACodec::ExecutingState::ExecutingState(ACodec *codec) 6974 : BaseState(codec), 6975 mActive(false) { 6976} 6977 6978ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( 6979 OMX_U32 /* portIndex */) { 6980 return RESUBMIT_BUFFERS; 6981} 6982 6983void ACodec::ExecutingState::submitOutputMetaBuffers() { 6984 // submit as many buffers as there are input buffers with the codec 6985 // in case we are in port reconfiguring 6986 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 6987 BufferInfo *info = 
&mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6988 6989 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 6990 if (mCodec->submitOutputMetadataBuffer() != OK) 6991 break; 6992 } 6993 } 6994 6995 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 6996 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 6997} 6998 6999void ACodec::ExecutingState::submitRegularOutputBuffers() { 7000 bool failed = false; 7001 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 7002 BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i); 7003 7004 if (mCodec->mNativeWindow != NULL) { 7005 if (info->mStatus != BufferInfo::OWNED_BY_US 7006 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 7007 ALOGE("buffers should be owned by us or the surface"); 7008 failed = true; 7009 break; 7010 } 7011 7012 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 7013 continue; 7014 } 7015 } else { 7016 if (info->mStatus != BufferInfo::OWNED_BY_US) { 7017 ALOGE("buffers should be owned by us"); 7018 failed = true; 7019 break; 7020 } 7021 } 7022 7023 ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID); 7024 7025 info->checkWriteFence("submitRegularOutputBuffers"); 7026 status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 7027 info->mFenceFd = -1; 7028 if (err != OK) { 7029 failed = true; 7030 break; 7031 } 7032 7033 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 7034 } 7035 7036 if (failed) { 7037 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7038 } 7039} 7040 7041void ACodec::ExecutingState::submitOutputBuffers() { 7042 submitRegularOutputBuffers(); 7043 if (mCodec->storingMetadataInDecodedBuffers()) { 7044 submitOutputMetaBuffers(); 7045 } 7046} 7047 7048void ACodec::ExecutingState::resume() { 7049 if (mActive) { 7050 ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str()); 7051 return; 7052 } 7053 7054 
submitOutputBuffers(); 7055 7056 // Post all available input buffers 7057 if (mCodec->mBuffers[kPortIndexInput].size() == 0u) { 7058 ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str()); 7059 } 7060 7061 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) { 7062 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 7063 if (info->mStatus == BufferInfo::OWNED_BY_US) { 7064 postFillThisBuffer(info); 7065 } 7066 } 7067 7068 mActive = true; 7069} 7070 7071void ACodec::ExecutingState::stateEntered() { 7072 ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); 7073 7074 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7075 mCodec->processDeferredMessages(); 7076} 7077 7078bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { 7079 bool handled = false; 7080 7081 switch (msg->what()) { 7082 case kWhatShutdown: 7083 { 7084 int32_t keepComponentAllocated; 7085 CHECK(msg->findInt32( 7086 "keepComponentAllocated", &keepComponentAllocated)); 7087 7088 mCodec->mShutdownInProgress = true; 7089 mCodec->mExplicitShutdown = true; 7090 mCodec->mKeepComponentAllocated = keepComponentAllocated; 7091 7092 mActive = false; 7093 7094 status_t err = mCodec->mOMX->sendCommand( 7095 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 7096 if (err != OK) { 7097 if (keepComponentAllocated) { 7098 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7099 } 7100 // TODO: do some recovery here. 
7101 } else { 7102 mCodec->changeState(mCodec->mExecutingToIdleState); 7103 } 7104 7105 handled = true; 7106 break; 7107 } 7108 7109 case kWhatFlush: 7110 { 7111 ALOGV("[%s] ExecutingState flushing now " 7112 "(codec owns %zu/%zu input, %zu/%zu output).", 7113 mCodec->mComponentName.c_str(), 7114 mCodec->countBuffersOwnedByComponent(kPortIndexInput), 7115 mCodec->mBuffers[kPortIndexInput].size(), 7116 mCodec->countBuffersOwnedByComponent(kPortIndexOutput), 7117 mCodec->mBuffers[kPortIndexOutput].size()); 7118 7119 mActive = false; 7120 7121 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL); 7122 if (err != OK) { 7123 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7124 } else { 7125 mCodec->changeState(mCodec->mFlushingState); 7126 } 7127 7128 handled = true; 7129 break; 7130 } 7131 7132 case kWhatResume: 7133 { 7134 resume(); 7135 7136 handled = true; 7137 break; 7138 } 7139 7140 case kWhatRequestIDRFrame: 7141 { 7142 status_t err = mCodec->requestIDRFrame(); 7143 if (err != OK) { 7144 ALOGW("Requesting an IDR frame failed."); 7145 } 7146 7147 handled = true; 7148 break; 7149 } 7150 7151 case kWhatSetParameters: 7152 { 7153 sp<AMessage> params; 7154 CHECK(msg->findMessage("params", ¶ms)); 7155 7156 status_t err = mCodec->setParameters(params); 7157 7158 sp<AMessage> reply; 7159 if (msg->findMessage("reply", &reply)) { 7160 reply->setInt32("err", err); 7161 reply->post(); 7162 } 7163 7164 handled = true; 7165 break; 7166 } 7167 7168 case ACodec::kWhatSignalEndOfInputStream: 7169 { 7170 mCodec->onSignalEndOfInputStream(); 7171 handled = true; 7172 break; 7173 } 7174 7175 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 7176 case kWhatSubmitOutputMetadataBufferIfEOS: 7177 { 7178 if (mCodec->mPortEOS[kPortIndexInput] && 7179 !mCodec->mPortEOS[kPortIndexOutput]) { 7180 status_t err = mCodec->submitOutputMetadataBuffer(); 7181 if (err == OK) { 7182 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 7183 } 
7184 } 7185 return true; 7186 } 7187 7188 default: 7189 handled = BaseState::onMessageReceived(msg); 7190 break; 7191 } 7192 7193 return handled; 7194} 7195 7196status_t ACodec::setParameters(const sp<AMessage> ¶ms) { 7197 int32_t videoBitrate; 7198 if (params->findInt32("video-bitrate", &videoBitrate)) { 7199 OMX_VIDEO_CONFIG_BITRATETYPE configParams; 7200 InitOMXParams(&configParams); 7201 configParams.nPortIndex = kPortIndexOutput; 7202 configParams.nEncodeBitrate = videoBitrate; 7203 7204 status_t err = mOMX->setConfig( 7205 mNode, 7206 OMX_IndexConfigVideoBitrate, 7207 &configParams, 7208 sizeof(configParams)); 7209 7210 if (err != OK) { 7211 ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d", 7212 videoBitrate, err); 7213 7214 return err; 7215 } 7216 } 7217 7218 int64_t skipFramesBeforeUs; 7219 if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) { 7220 status_t err = 7221 mOMX->setInternalOption( 7222 mNode, 7223 kPortIndexInput, 7224 IOMX::INTERNAL_OPTION_START_TIME, 7225 &skipFramesBeforeUs, 7226 sizeof(skipFramesBeforeUs)); 7227 7228 if (err != OK) { 7229 ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err); 7230 return err; 7231 } 7232 } 7233 7234 int32_t dropInputFrames; 7235 if (params->findInt32("drop-input-frames", &dropInputFrames)) { 7236 bool suspend = dropInputFrames != 0; 7237 7238 status_t err = 7239 mOMX->setInternalOption( 7240 mNode, 7241 kPortIndexInput, 7242 IOMX::INTERNAL_OPTION_SUSPEND, 7243 &suspend, 7244 sizeof(suspend)); 7245 7246 if (err != OK) { 7247 ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err); 7248 return err; 7249 } 7250 } 7251 7252 int32_t dummy; 7253 if (params->findInt32("request-sync", &dummy)) { 7254 status_t err = requestIDRFrame(); 7255 7256 if (err != OK) { 7257 ALOGE("Requesting a sync frame failed w/ err %d", err); 7258 return err; 7259 } 7260 } 7261 7262 float rate; 7263 if (params->findFloat("operating-rate", &rate) && rate > 0) { 7264 status_t err = 
setOperatingRate(rate, mIsVideo); 7265 if (err != OK) { 7266 ALOGE("Failed to set parameter 'operating-rate' (err %d)", err); 7267 return err; 7268 } 7269 } 7270 7271 int32_t intraRefreshPeriod = 0; 7272 if (params->findInt32("intra-refresh-period", &intraRefreshPeriod) 7273 && intraRefreshPeriod > 0) { 7274 status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false); 7275 if (err != OK) { 7276 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 7277 mComponentName.c_str()); 7278 err = OK; 7279 } 7280 } 7281 7282 return OK; 7283} 7284 7285void ACodec::onSignalEndOfInputStream() { 7286 sp<AMessage> notify = mNotify->dup(); 7287 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); 7288 7289 status_t err = mOMX->signalEndOfInputStream(mNode); 7290 if (err != OK) { 7291 notify->setInt32("err", err); 7292 } 7293 notify->post(); 7294} 7295 7296bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 7297 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7298 return true; 7299} 7300 7301bool ACodec::ExecutingState::onOMXEvent( 7302 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7303 switch (event) { 7304 case OMX_EventPortSettingsChanged: 7305 { 7306 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 7307 7308 mCodec->onOutputFormatChanged(); 7309 7310 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { 7311 mCodec->mMetadataBuffersToSubmit = 0; 7312 CHECK_EQ(mCodec->mOMX->sendCommand( 7313 mCodec->mNode, 7314 OMX_CommandPortDisable, kPortIndexOutput), 7315 (status_t)OK); 7316 7317 mCodec->freeOutputBuffersNotOwnedByComponent(); 7318 7319 mCodec->changeState(mCodec->mOutputPortSettingsChangedState); 7320 } else if (data2 != OMX_IndexConfigCommonOutputCrop 7321 && data2 != OMX_IndexConfigAndroidIntraRefresh) { 7322 ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x", 7323 mCodec->mComponentName.c_str(), data2); 7324 } 7325 7326 return true; 7327 } 7328 7329 case OMX_EventBufferFlag: 7330 { 
7331 return true; 7332 } 7333 7334 default: 7335 return BaseState::onOMXEvent(event, data1, data2); 7336 } 7337} 7338 7339//////////////////////////////////////////////////////////////////////////////// 7340 7341ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState( 7342 ACodec *codec) 7343 : BaseState(codec) { 7344} 7345 7346ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode( 7347 OMX_U32 portIndex) { 7348 if (portIndex == kPortIndexOutput) { 7349 return FREE_BUFFERS; 7350 } 7351 7352 CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput); 7353 7354 return RESUBMIT_BUFFERS; 7355} 7356 7357bool ACodec::OutputPortSettingsChangedState::onMessageReceived( 7358 const sp<AMessage> &msg) { 7359 bool handled = false; 7360 7361 switch (msg->what()) { 7362 case kWhatFlush: 7363 case kWhatShutdown: 7364 case kWhatResume: 7365 case kWhatSetParameters: 7366 { 7367 if (msg->what() == kWhatResume) { 7368 ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str()); 7369 } 7370 7371 mCodec->deferMessage(msg); 7372 handled = true; 7373 break; 7374 } 7375 7376 default: 7377 handled = BaseState::onMessageReceived(msg); 7378 break; 7379 } 7380 7381 return handled; 7382} 7383 7384void ACodec::OutputPortSettingsChangedState::stateEntered() { 7385 ALOGV("[%s] Now handling output port settings change", 7386 mCodec->mComponentName.c_str()); 7387} 7388 7389bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered( 7390 int64_t mediaTimeUs, nsecs_t systemNano) { 7391 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7392 return true; 7393} 7394 7395bool ACodec::OutputPortSettingsChangedState::onOMXEvent( 7396 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7397 switch (event) { 7398 case OMX_EventCmdComplete: 7399 { 7400 if (data1 == (OMX_U32)OMX_CommandPortDisable) { 7401 if (data2 != (OMX_U32)kPortIndexOutput) { 7402 ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2); 7403 return false; 7404 } 7405 7406 ALOGV("[%s] 
Output port now disabled.", mCodec->mComponentName.c_str()); 7407 7408 status_t err = OK; 7409 if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) { 7410 ALOGE("disabled port should be empty, but has %zu buffers", 7411 mCodec->mBuffers[kPortIndexOutput].size()); 7412 err = FAILED_TRANSACTION; 7413 } else { 7414 mCodec->mDealer[kPortIndexOutput].clear(); 7415 } 7416 7417 if (err == OK) { 7418 err = mCodec->mOMX->sendCommand( 7419 mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput); 7420 } 7421 7422 if (err == OK) { 7423 err = mCodec->allocateBuffersOnPort(kPortIndexOutput); 7424 ALOGE_IF(err != OK, "Failed to allocate output port buffers after port " 7425 "reconfiguration: (%d)", err); 7426 } 7427 7428 if (err != OK) { 7429 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 7430 7431 // This is technically not correct, but appears to be 7432 // the only way to free the component instance. 7433 // Controlled transitioning from excecuting->idle 7434 // and idle->loaded seem impossible probably because 7435 // the output port never finishes re-enabling. 
7436 mCodec->mShutdownInProgress = true; 7437 mCodec->mKeepComponentAllocated = false; 7438 mCodec->changeState(mCodec->mLoadedState); 7439 } 7440 7441 return true; 7442 } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { 7443 if (data2 != (OMX_U32)kPortIndexOutput) { 7444 ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2); 7445 return false; 7446 } 7447 7448 ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str()); 7449 7450 if (mCodec->mExecutingState->active()) { 7451 mCodec->mExecutingState->submitOutputBuffers(); 7452 } 7453 7454 mCodec->changeState(mCodec->mExecutingState); 7455 7456 return true; 7457 } 7458 7459 return false; 7460 } 7461 7462 default: 7463 return false; 7464 } 7465} 7466 7467//////////////////////////////////////////////////////////////////////////////// 7468 7469ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec) 7470 : BaseState(codec), 7471 mComponentNowIdle(false) { 7472} 7473 7474bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) { 7475 bool handled = false; 7476 7477 switch (msg->what()) { 7478 case kWhatFlush: 7479 { 7480 // Don't send me a flush request if you previously wanted me 7481 // to shutdown. 7482 ALOGW("Ignoring flush request in ExecutingToIdleState"); 7483 break; 7484 } 7485 7486 case kWhatShutdown: 7487 { 7488 // We're already doing that... 
7489 7490 handled = true; 7491 break; 7492 } 7493 7494 default: 7495 handled = BaseState::onMessageReceived(msg); 7496 break; 7497 } 7498 7499 return handled; 7500} 7501 7502void ACodec::ExecutingToIdleState::stateEntered() { 7503 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); 7504 7505 mComponentNowIdle = false; 7506 mCodec->mLastOutputFormat.clear(); 7507} 7508 7509bool ACodec::ExecutingToIdleState::onOMXEvent( 7510 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7511 switch (event) { 7512 case OMX_EventCmdComplete: 7513 { 7514 if (data1 != (OMX_U32)OMX_CommandStateSet 7515 || data2 != (OMX_U32)OMX_StateIdle) { 7516 ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)", 7517 asString((OMX_COMMANDTYPE)data1), data1, 7518 asString((OMX_STATETYPE)data2), data2); 7519 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7520 return true; 7521 } 7522 7523 mComponentNowIdle = true; 7524 7525 changeStateIfWeOwnAllBuffers(); 7526 7527 return true; 7528 } 7529 7530 case OMX_EventPortSettingsChanged: 7531 case OMX_EventBufferFlag: 7532 { 7533 // We're shutting down and don't care about this anymore. 7534 return true; 7535 } 7536 7537 default: 7538 return BaseState::onOMXEvent(event, data1, data2); 7539 } 7540} 7541 7542void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { 7543 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { 7544 status_t err = mCodec->mOMX->sendCommand( 7545 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 7546 if (err == OK) { 7547 err = mCodec->freeBuffersOnPort(kPortIndexInput); 7548 status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput); 7549 if (err == OK) { 7550 err = err2; 7551 } 7552 } 7553 7554 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) 7555 && mCodec->mNativeWindow != NULL) { 7556 // We push enough 1x1 blank buffers to ensure that one of 7557 // them has made it to the display. 
This allows the OMX 7558 // component teardown to zero out any protected buffers 7559 // without the risk of scanning out one of those buffers. 7560 pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get()); 7561 } 7562 7563 if (err != OK) { 7564 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7565 return; 7566 } 7567 7568 mCodec->changeState(mCodec->mIdleToLoadedState); 7569 } 7570} 7571 7572void ACodec::ExecutingToIdleState::onInputBufferFilled( 7573 const sp<AMessage> &msg) { 7574 BaseState::onInputBufferFilled(msg); 7575 7576 changeStateIfWeOwnAllBuffers(); 7577} 7578 7579void ACodec::ExecutingToIdleState::onOutputBufferDrained( 7580 const sp<AMessage> &msg) { 7581 BaseState::onOutputBufferDrained(msg); 7582 7583 changeStateIfWeOwnAllBuffers(); 7584} 7585 7586//////////////////////////////////////////////////////////////////////////////// 7587 7588ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) 7589 : BaseState(codec) { 7590} 7591 7592bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { 7593 bool handled = false; 7594 7595 switch (msg->what()) { 7596 case kWhatShutdown: 7597 { 7598 // We're already doing that... 7599 7600 handled = true; 7601 break; 7602 } 7603 7604 case kWhatFlush: 7605 { 7606 // Don't send me a flush request if you previously wanted me 7607 // to shutdown. 
7608 ALOGE("Got flush request in IdleToLoadedState"); 7609 break; 7610 } 7611 7612 default: 7613 handled = BaseState::onMessageReceived(msg); 7614 break; 7615 } 7616 7617 return handled; 7618} 7619 7620void ACodec::IdleToLoadedState::stateEntered() { 7621 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7622} 7623 7624bool ACodec::IdleToLoadedState::onOMXEvent( 7625 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7626 switch (event) { 7627 case OMX_EventCmdComplete: 7628 { 7629 if (data1 != (OMX_U32)OMX_CommandStateSet 7630 || data2 != (OMX_U32)OMX_StateLoaded) { 7631 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7632 asString((OMX_COMMANDTYPE)data1), data1, 7633 asString((OMX_STATETYPE)data2), data2); 7634 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7635 return true; 7636 } 7637 7638 mCodec->changeState(mCodec->mLoadedState); 7639 7640 return true; 7641 } 7642 7643 default: 7644 return BaseState::onOMXEvent(event, data1, data2); 7645 } 7646} 7647 7648//////////////////////////////////////////////////////////////////////////////// 7649 7650ACodec::FlushingState::FlushingState(ACodec *codec) 7651 : BaseState(codec) { 7652} 7653 7654void ACodec::FlushingState::stateEntered() { 7655 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7656 7657 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7658} 7659 7660bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7661 bool handled = false; 7662 7663 switch (msg->what()) { 7664 case kWhatShutdown: 7665 { 7666 mCodec->deferMessage(msg); 7667 break; 7668 } 7669 7670 case kWhatFlush: 7671 { 7672 // We're already doing this right now. 
7673 handled = true; 7674 break; 7675 } 7676 7677 default: 7678 handled = BaseState::onMessageReceived(msg); 7679 break; 7680 } 7681 7682 return handled; 7683} 7684 7685bool ACodec::FlushingState::onOMXEvent( 7686 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7687 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7688 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7689 7690 switch (event) { 7691 case OMX_EventCmdComplete: 7692 { 7693 if (data1 != (OMX_U32)OMX_CommandFlush) { 7694 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7695 asString((OMX_COMMANDTYPE)data1), data1, data2); 7696 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7697 return true; 7698 } 7699 7700 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7701 if (mFlushComplete[data2]) { 7702 ALOGW("Flush already completed for %s port", 7703 data2 == kPortIndexInput ? "input" : "output"); 7704 return true; 7705 } 7706 mFlushComplete[data2] = true; 7707 7708 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7709 changeStateIfWeOwnAllBuffers(); 7710 } 7711 } else if (data2 == OMX_ALL) { 7712 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7713 ALOGW("received flush complete event for OMX_ALL before ports have been" 7714 "flushed (%d/%d)", 7715 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7716 return false; 7717 } 7718 7719 changeStateIfWeOwnAllBuffers(); 7720 } else { 7721 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7722 } 7723 7724 return true; 7725 } 7726 7727 case OMX_EventPortSettingsChanged: 7728 { 7729 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7730 msg->setInt32("type", omx_message::EVENT); 7731 msg->setInt32("node", mCodec->mNode); 7732 msg->setInt32("event", event); 7733 msg->setInt32("data1", data1); 7734 msg->setInt32("data2", data2); 7735 7736 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7737 
mCodec->mComponentName.c_str()); 7738 7739 mCodec->deferMessage(msg); 7740 7741 return true; 7742 } 7743 7744 default: 7745 return BaseState::onOMXEvent(event, data1, data2); 7746 } 7747 7748 return true; 7749} 7750 7751void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7752 BaseState::onOutputBufferDrained(msg); 7753 7754 changeStateIfWeOwnAllBuffers(); 7755} 7756 7757void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7758 BaseState::onInputBufferFilled(msg); 7759 7760 changeStateIfWeOwnAllBuffers(); 7761} 7762 7763void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7764 if (mFlushComplete[kPortIndexInput] 7765 && mFlushComplete[kPortIndexOutput] 7766 && mCodec->allYourBuffersAreBelongToUs()) { 7767 // We now own all buffers except possibly those still queued with 7768 // the native window for rendering. Let's get those back as well. 7769 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7770 7771 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7772 7773 sp<AMessage> notify = mCodec->mNotify->dup(); 7774 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7775 notify->post(); 7776 7777 mCodec->mPortEOS[kPortIndexInput] = 7778 mCodec->mPortEOS[kPortIndexOutput] = false; 7779 7780 mCodec->mInputEOSResult = OK; 7781 7782 if (mCodec->mSkipCutBuffer != NULL) { 7783 mCodec->mSkipCutBuffer->clear(); 7784 } 7785 7786 mCodec->changeState(mCodec->mExecutingState); 7787 } 7788} 7789 7790status_t ACodec::queryCapabilities( 7791 const AString &name, const AString &mime, bool isEncoder, 7792 sp<MediaCodecInfo::Capabilities> *caps) { 7793 (*caps).clear(); 7794 const char *role = getComponentRole(isEncoder, mime.c_str()); 7795 if (role == NULL) { 7796 return BAD_VALUE; 7797 } 7798 7799 OMXClient client; 7800 status_t err = client.connect(); 7801 if (err != OK) { 7802 return err; 7803 } 7804 7805 sp<IOMX> omx = client.interface(); 7806 sp<CodecObserver> observer = new CodecObserver; 7807 
IOMX::node_id node = 0; 7808 7809 err = omx->allocateNode(name.c_str(), observer, NULL, &node); 7810 if (err != OK) { 7811 client.disconnect(); 7812 return err; 7813 } 7814 7815 err = setComponentRole(omx, node, role); 7816 if (err != OK) { 7817 omx->freeNode(node); 7818 client.disconnect(); 7819 return err; 7820 } 7821 7822 sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder(); 7823 bool isVideo = mime.startsWithIgnoreCase("video/"); 7824 7825 if (isVideo) { 7826 OMX_VIDEO_PARAM_PROFILELEVELTYPE param; 7827 InitOMXParams(¶m); 7828 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7829 7830 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7831 param.nProfileIndex = index; 7832 status_t err = omx->getParameter( 7833 node, OMX_IndexParamVideoProfileLevelQuerySupported, 7834 ¶m, sizeof(param)); 7835 if (err != OK) { 7836 break; 7837 } 7838 builder->addProfileLevel(param.eProfile, param.eLevel); 7839 7840 if (index == kMaxIndicesToCheck) { 7841 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 7842 name.c_str(), index, 7843 param.eProfile, param.eLevel); 7844 } 7845 } 7846 7847 // Color format query 7848 // return colors in the order reported by the OMX component 7849 // prefix "flexible" standard ones with the flexible equivalent 7850 OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat; 7851 InitOMXParams(&portFormat); 7852 portFormat.nPortIndex = isEncoder ? 
kPortIndexInput : kPortIndexOutput; 7853 Vector<uint32_t> supportedColors; // shadow copy to check for duplicates 7854 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7855 portFormat.nIndex = index; 7856 status_t err = omx->getParameter( 7857 node, OMX_IndexParamVideoPortFormat, 7858 &portFormat, sizeof(portFormat)); 7859 if (err != OK) { 7860 break; 7861 } 7862 7863 OMX_U32 flexibleEquivalent; 7864 if (isFlexibleColorFormat( 7865 omx, node, portFormat.eColorFormat, false /* usingNativeWindow */, 7866 &flexibleEquivalent)) { 7867 bool marked = false; 7868 for (size_t i = 0; i < supportedColors.size(); ++i) { 7869 if (supportedColors[i] == flexibleEquivalent) { 7870 marked = true; 7871 break; 7872 } 7873 } 7874 if (!marked) { 7875 supportedColors.push(flexibleEquivalent); 7876 builder->addColorFormat(flexibleEquivalent); 7877 } 7878 } 7879 supportedColors.push(portFormat.eColorFormat); 7880 builder->addColorFormat(portFormat.eColorFormat); 7881 7882 if (index == kMaxIndicesToCheck) { 7883 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 7884 name.c_str(), index, 7885 asString(portFormat.eColorFormat), portFormat.eColorFormat); 7886 } 7887 } 7888 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) { 7889 // More audio codecs if they have profiles. 7890 OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param; 7891 InitOMXParams(¶m); 7892 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7893 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7894 param.nProfileIndex = index; 7895 status_t err = omx->getParameter( 7896 node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported, 7897 ¶m, sizeof(param)); 7898 if (err != OK) { 7899 break; 7900 } 7901 // For audio, level is ignored. 
7902 builder->addProfileLevel(param.eProfile, 0 /* level */); 7903 7904 if (index == kMaxIndicesToCheck) { 7905 ALOGW("[%s] stopping checking profiles after %u: %x", 7906 name.c_str(), index, 7907 param.eProfile); 7908 } 7909 } 7910 7911 // NOTE: Without Android extensions, OMX does not provide a way to query 7912 // AAC profile support 7913 if (param.nProfileIndex == 0) { 7914 ALOGW("component %s doesn't support profile query.", name.c_str()); 7915 } 7916 } 7917 7918 if (isVideo && !isEncoder) { 7919 native_handle_t *sidebandHandle = NULL; 7920 if (omx->configureVideoTunnelMode( 7921 node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) { 7922 // tunneled playback includes adaptive playback 7923 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback 7924 | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback); 7925 } else if (omx->storeMetaDataInBuffers( 7926 node, kPortIndexOutput, OMX_TRUE) == OK || 7927 omx->prepareForAdaptivePlayback( 7928 node, kPortIndexOutput, OMX_TRUE, 7929 1280 /* width */, 720 /* height */) == OK) { 7930 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback); 7931 } 7932 } 7933 7934 if (isVideo && isEncoder) { 7935 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 7936 InitOMXParams(¶ms); 7937 params.nPortIndex = kPortIndexOutput; 7938 // TODO: should we verify if fallback is supported? 7939 if (omx->getConfig( 7940 node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, 7941 ¶ms, sizeof(params)) == OK) { 7942 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh); 7943 } 7944 } 7945 7946 *caps = builder; 7947 omx->freeNode(node); 7948 client.disconnect(); 7949 return OK; 7950} 7951 7952// These are supposed be equivalent to the logic in 7953// "audio_channel_out_mask_from_count". 
7954//static 7955status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7956 switch (numChannels) { 7957 case 1: 7958 map[0] = OMX_AUDIO_ChannelCF; 7959 break; 7960 case 2: 7961 map[0] = OMX_AUDIO_ChannelLF; 7962 map[1] = OMX_AUDIO_ChannelRF; 7963 break; 7964 case 3: 7965 map[0] = OMX_AUDIO_ChannelLF; 7966 map[1] = OMX_AUDIO_ChannelRF; 7967 map[2] = OMX_AUDIO_ChannelCF; 7968 break; 7969 case 4: 7970 map[0] = OMX_AUDIO_ChannelLF; 7971 map[1] = OMX_AUDIO_ChannelRF; 7972 map[2] = OMX_AUDIO_ChannelLR; 7973 map[3] = OMX_AUDIO_ChannelRR; 7974 break; 7975 case 5: 7976 map[0] = OMX_AUDIO_ChannelLF; 7977 map[1] = OMX_AUDIO_ChannelRF; 7978 map[2] = OMX_AUDIO_ChannelCF; 7979 map[3] = OMX_AUDIO_ChannelLR; 7980 map[4] = OMX_AUDIO_ChannelRR; 7981 break; 7982 case 6: 7983 map[0] = OMX_AUDIO_ChannelLF; 7984 map[1] = OMX_AUDIO_ChannelRF; 7985 map[2] = OMX_AUDIO_ChannelCF; 7986 map[3] = OMX_AUDIO_ChannelLFE; 7987 map[4] = OMX_AUDIO_ChannelLR; 7988 map[5] = OMX_AUDIO_ChannelRR; 7989 break; 7990 case 7: 7991 map[0] = OMX_AUDIO_ChannelLF; 7992 map[1] = OMX_AUDIO_ChannelRF; 7993 map[2] = OMX_AUDIO_ChannelCF; 7994 map[3] = OMX_AUDIO_ChannelLFE; 7995 map[4] = OMX_AUDIO_ChannelLR; 7996 map[5] = OMX_AUDIO_ChannelRR; 7997 map[6] = OMX_AUDIO_ChannelCS; 7998 break; 7999 case 8: 8000 map[0] = OMX_AUDIO_ChannelLF; 8001 map[1] = OMX_AUDIO_ChannelRF; 8002 map[2] = OMX_AUDIO_ChannelCF; 8003 map[3] = OMX_AUDIO_ChannelLFE; 8004 map[4] = OMX_AUDIO_ChannelLR; 8005 map[5] = OMX_AUDIO_ChannelRR; 8006 map[6] = OMX_AUDIO_ChannelLS; 8007 map[7] = OMX_AUDIO_ChannelRS; 8008 break; 8009 default: 8010 return -EINVAL; 8011 } 8012 8013 return OK; 8014} 8015 8016} // namespace android 8017