ACodec.cpp revision b7e75437b160fd8fe239f62f690af6ff0c2a7df6
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "omx/OMXUtils.h" 57 58namespace android { 59 60enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62}; 63 64// OMX errors are directly mapped into 
// status_t range if
// there is no corresponding MediaError status code.
// Use the statusFromOMXError(int32_t omxError) function.
//
// Currently this is a direct map.
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors     from 0x90000000 - 0x9000FFFF
// Extension OMX errors  from 0x8F000000 - 0x90000000
// Standard OMX errors   from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
        default:
            // in-range OMX errors pass through unchanged; anything else maps to OK
            return isOMXError(omxError) ? omxError : 0; // no translation required
    }
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Ref-counted holder for a list of AMessages, so a whole batch of OMX
// messages can be attached to a single notify AMessage via setObject().
struct MessageList : public RefBase {
    MessageList() {
    }
    virtual ~MessageList() {
    }
    std::list<sp<AMessage> > &getList() { return mList; }
private:
    std::list<sp<AMessage> > mList;

    DISALLOW_EVIL_CONSTRUCTORS(MessageList);
};

// Returns the process-wide pass-through DataConverter, created exactly once
// on first use (pthread_once rather than a function-local static initializer).
static sp<DataConverter> getCopyConverter() {
    static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited
    static sp<DataConverter> sCopyConverter;        // zero-inited
    pthread_once(&once, [](){ sCopyConverter = new DataConverter(); });
    return sCopyConverter;
}

// Receives IOMX callbacks (on a binder thread) and repackages each batch of
// omx_messages into AMessages posted to the notification target, so all
// handling happens on the ACodec looper.
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessages(const std::list<omx_message> &messages) {
        if (messages.empty()) {
            return;
        }

        sp<AMessage> notify = mNotify->dup();
        bool first = true;
        sp<MessageList> msgList = new MessageList();
        for (std::list<omx_message>::const_iterator it = messages.cbegin();
              it != messages.cend(); ++it) {
            const omx_message &omx_msg = *it;
            if (first) {
                // all messages in one batch come from the same node
                notify->setInt32("node", omx_msg.node);
                first = false;
            }

            sp<AMessage> msg = new AMessage;
            msg->setInt32("type", omx_msg.type);
            switch (omx_msg.type) {
                case omx_message::EVENT:
                {
                    msg->setInt32("event", omx_msg.u.event_data.event);
                    msg->setInt32("data1", omx_msg.u.event_data.data1);
                    msg->setInt32("data2", omx_msg.u.event_data.data2);
                    break;
                }

                case omx_message::EMPTY_BUFFER_DONE:
                {
                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                    msg->setInt32("fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FILL_BUFFER_DONE:
                {
                    msg->setInt32(
                            "buffer", omx_msg.u.extended_buffer_data.buffer);
                    msg->setInt32(
                            "range_offset",
                            omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for all states of ACodec's state machine; provides default
// message/OMX-event handling and per-port buffer policies.
struct ACodec::BaseState : public AState {
    explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    // what to do with buffers that come back from OMX while in this state
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts the supplied AMessage when the watched binder dies.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    explicit DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

// State before an OMX component has been allocated.
struct ACodec::UninitializedState : public ACodec::BaseState {
    explicit UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

// State with a component allocated but not yet configured/started.
struct ACodec::LoadedState : public ACodec::BaseState {
    explicit LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Loaded -> Idle (buffers being allocated).
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    explicit LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Idle -> Executing.
struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    explicit IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Steady state: buffers are flowing between client, codec and (possibly)
// the native window.
struct ACodec::ExecutingState : public ACodec::BaseState {
    explicit ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// State while the output port is reconfiguring after a port-settings-changed
// event.
struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    explicit OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Executing -> Idle (shutdown path, waiting to reclaim
// all buffers).
struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    explicit ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Idle -> Loaded (buffers being freed).
struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    explicit IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// State while both ports are flushing; mFlushComplete tracks per-port
// completion.
struct ACodec::FlushingState : public ACodec::BaseState {
    explicit FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Records fenceFd as this buffer's write fence, warning if it clobbers an
// existing (unwaited-on) fence.
void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = false;
}

// Records fenceFd as this buffer's read fence, warning if it clobbers an
// existing (unwaited-on) fence.
void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
                mIsReadFence ?
"read" : "write", mFenceFd, fenceFd, dbg); 477 } 478 mFenceFd = fenceFd; 479 mIsReadFence = true; 480} 481 482void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 483 if (mFenceFd >= 0 && mIsReadFence) { 484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 485 } 486} 487 488void ACodec::BufferInfo::checkReadFence(const char *dbg) { 489 if (mFenceFd >= 0 && !mIsReadFence) { 490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 491 } 492} 493 494//////////////////////////////////////////////////////////////////////////////// 495 496ACodec::ACodec() 497 : mSampleRate(0), 498 mQuirks(0), 499 mNode(0), 500 mUsingNativeWindow(false), 501 mNativeWindowUsageBits(0), 502 mIsVideo(false), 503 mIsEncoder(false), 504 mFatalError(false), 505 mShutdownInProgress(false), 506 mExplicitShutdown(false), 507 mIsLegacyVP9Decoder(false), 508 mEncoderDelay(0), 509 mEncoderPadding(0), 510 mRotationDegrees(0), 511 mChannelMaskPresent(false), 512 mChannelMask(0), 513 mDequeueCounter(0), 514 mInputMetadataType(kMetadataBufferTypeInvalid), 515 mOutputMetadataType(kMetadataBufferTypeInvalid), 516 mLegacyAdaptiveExperiment(false), 517 mMetadataBuffersToSubmit(0), 518 mNumUndequeuedBuffers(0), 519 mRepeatFrameDelayUs(-1ll), 520 mMaxPtsGapUs(-1ll), 521 mMaxFps(-1), 522 mTimePerFrameUs(-1ll), 523 mTimePerCaptureUs(-1ll), 524 mCreateInputBuffersSuspended(false), 525 mTunneled(false), 526 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0), 527 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) { 528 mUninitializedState = new UninitializedState(this); 529 mLoadedState = new LoadedState(this); 530 mLoadedToIdleState = new LoadedToIdleState(this); 531 mIdleToExecutingState = new IdleToExecutingState(this); 532 mExecutingState = new ExecutingState(this); 533 534 mOutputPortSettingsChangedState = 535 new OutputPortSettingsChangedState(this); 536 537 mExecutingToIdleState = new ExecutingToIdleState(this); 538 mIdleToLoadedState = new IdleToLoadedState(this); 539 
mFlushingState = new FlushingState(this); 540 541 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 542 mInputEOSResult = OK; 543 544 changeState(mUninitializedState); 545} 546 547ACodec::~ACodec() { 548} 549 550void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 551 mNotify = msg; 552} 553 554void ACodec::initiateSetup(const sp<AMessage> &msg) { 555 msg->setWhat(kWhatSetup); 556 msg->setTarget(this); 557 msg->post(); 558} 559 560void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 561 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 562 msg->setMessage("params", params); 563 msg->post(); 564} 565 566void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 567 msg->setWhat(kWhatAllocateComponent); 568 msg->setTarget(this); 569 msg->post(); 570} 571 572void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 573 msg->setWhat(kWhatConfigureComponent); 574 msg->setTarget(this); 575 msg->post(); 576} 577 578status_t ACodec::setSurface(const sp<Surface> &surface) { 579 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 580 msg->setObject("surface", surface); 581 582 sp<AMessage> response; 583 status_t err = msg->postAndAwaitResponse(&response); 584 585 if (err == OK) { 586 (void)response->findInt32("err", &err); 587 } 588 return err; 589} 590 591void ACodec::initiateCreateInputSurface() { 592 (new AMessage(kWhatCreateInputSurface, this))->post(); 593} 594 595void ACodec::initiateSetInputSurface( 596 const sp<PersistentSurface> &surface) { 597 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 598 msg->setObject("input-surface", surface); 599 msg->post(); 600} 601 602void ACodec::signalEndOfInputStream() { 603 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 604} 605 606void ACodec::initiateStart() { 607 (new AMessage(kWhatStart, this))->post(); 608} 609 610void ACodec::signalFlush() { 611 ALOGV("[%s] signalFlush", mComponentName.c_str()); 612 (new AMessage(kWhatFlush, this))->post(); 
}

void ACodec::signalResume() {
    (new AMessage(kWhatResume, this))->post();
}

void ACodec::initiateShutdown(bool keepComponentAllocated) {
    sp<AMessage> msg = new AMessage(kWhatShutdown, this);
    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
    msg->post();
    if (!keepComponentAllocated) {
        // ensure shutdown completes in 3 seconds
        (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
    }
}

void ACodec::signalRequestIDRFrame() {
    (new AMessage(kWhatRequestIDRFrame, this))->post();
}

// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port. For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetadataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
    }
}

// Switches the output surface at runtime. The new surface must be compatible
// with the current configuration: same-or-fewer usage bits, no more
// undequeued buffers than planned for, and (after start) all already-
// allocated buffers are attached/cancelled into the new surface.
status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
    // allow keeping unset surface
    if (surface == NULL) {
        if (mNativeWindow != NULL) {
            ALOGW("cannot unset a surface");
            return INVALID_OPERATION;
        }
        return OK;
    }

    // cannot switch from bytebuffers to surface
    if (mNativeWindow == NULL) {
        ALOGW("component was not configured with a surface");
        return INVALID_OPERATION;
    }

    ANativeWindow *nativeWindow = surface.get();
    // if we have not yet started the codec, we can simply set the native window
    if (mBuffers[kPortIndexInput].size() == 0) {
        mNativeWindow = surface;
        return OK;
    }

    // we do not support changing a tunneled surface after start
    if (mTunneled) {
        ALOGW("cannot change tunneled surface");
        return INVALID_OPERATION;
    }

    int usageBits = 0;
    status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow, &usageBits);
    if (err != OK) {
        return err;
    }

    int ignoredFlags = kVideoGrallocUsage;
    // New output surface is not allowed to add new usage flag except ignored ones.
    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
        return BAD_VALUE;
    }

    // get min undequeued count. We cannot switch to a surface that has a higher
    // undequeued count than we allocated.
    int minUndequeuedBuffers = 0;
    err = nativeWindow->query(
            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &minUndequeuedBuffers);
    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }
    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
                minUndequeuedBuffers, mNumUndequeuedBuffers);
        return BAD_VALUE;
    }

    // we cannot change the number of output buffers while OMX is running
    // set up surface to the same count
    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
    ALOGV("setting up surface for %zu buffers", buffers.size());

    err = native_window_set_buffer_count(nativeWindow, buffers.size());
    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    // need to enable allocation when attaching
    surface->getIGraphicBufferProducer()->allowAllocation(true);

    // for meta data mode, we move dequeued buffers to the new surface.
    // for non-meta mode, we must move all registered buffers
    for (size_t i = 0; i < buffers.size(); ++i) {
        const BufferInfo &info = buffers[i];
        // skip undequeued buffers for meta data mode
        if (storingMetadataInDecodedBuffers()
                && !mLegacyAdaptiveExperiment
                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("skipping buffer");
            continue;
        }
        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());

        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
        if (err != OK) {
            ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
                    info.mGraphicBuffer->getNativeBuffer(),
                    strerror(-err), -err);
            return err;
        }
    }

    // cancel undequeued buffers to new surface
    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            BufferInfo &info = buffers.editItemAt(i);
            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                err = nativeWindow->cancelBuffer(
                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
                info.mFenceFd = -1;
                if (err != OK) {
                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
                            info.mGraphicBuffer->getNativeBuffer(),
                            strerror(-err), -err);
                    return err;
                }
            }
        }
        // disallow further allocation
        (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // push blank buffers to previous window if requested
    if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
        pushBlankBuffersToNativeWindow(mNativeWindow.get());
    }

    mNativeWindow = nativeWindow;
    mNativeWindowUsageBits = usageBits;
    return OK;
}

// Allocates all buffers for one port: via the native window for output-with-
// surface, otherwise through a MemoryDealer shared with the OMX component.
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    CHECK(mDealer[portIndex] ==
NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (storingMetadataInDecodedBuffers()) {
            err = allocateOutputMetadataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            MetadataBufferType type =
                portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
            size_t bufSize = def.nBufferSize;
            if (type == kMetadataBufferTypeANWBuffer) {
                bufSize = sizeof(VideoNativeMetadata);
            } else if (type == kMetadataBufferTypeNativeHandleSource) {
                bufSize = sizeof(VideoNativeHandleMetadata);
            }

            // If using gralloc or native source input metadata buffers, allocate largest
            // metadata size as we prefer to generate native source metadata, but component
            // may require gralloc source. For camera source, allocate at least enough
            // size for native metadata buffers.
            size_t allottedSize = bufSize;
            if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) {
                bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
                bufSize = max(bufSize, sizeof(VideoNativeMetadata));
            }

            size_t conversionBufferSize = 0;

            sp<DataConverter> converter = mConverter[portIndex];
            if (converter != NULL) {
                // here we assume sane conversions of max 4:1, so result fits in int32
                if (portIndex == kPortIndexInput) {
                    conversionBufferSize = converter->sourceSize(bufSize);
                } else {
                    conversionBufferSize = converter->targetSize(bufSize);
                }
            }

            size_t alignment = MemoryDealer::getAllocationAlignment();

            ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
                    portIndex == kPortIndexInput ? "input" : "output");

            // verify buffer sizes to avoid overflow in align()
            if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            // don't modify bufSize as OMX may not expect it to increase after negotiation
            size_t alignedSize = align(bufSize, alignment);
            size_t alignedConvSize = align(conversionBufferSize, alignment);
            // overflow check for the multiplication below (b/22885421)
            if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
                if (mem == NULL || mem->pointer() == NULL) {
                    return NO_MEMORY;
                }

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;
                info.mFenceFd = -1;
                info.mRenderInfo = NULL;
                info.mNativeHandle = NULL;

                uint32_t requiresAllocateBufferBit =
                    (portIndex == kPortIndexInput)
                        ? kRequiresAllocateBufferOnInputPorts
                        : kRequiresAllocateBufferOnOutputPorts;

                if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
                    // secure input: the shared memory is not used; the component
                    // allocates a secure buffer and hands back an opaque handle
                    mem.clear();

                    void *ptr = NULL;
                    native_handle_t *native_handle = NULL;
                    err = mOMX->allocateSecureBuffer(
                            mNode, portIndex, bufSize, &info.mBufferID,
                            &ptr, &native_handle);

                    // TRICKY: this representation is unorthodox, but ACodec requires
                    // an ABuffer with a proper size to validate range offsets and lengths.
                    // Since mData is never referenced for secure input, it is used to store
                    // either the pointer to the secure buffer, or the opaque handle as on
                    // some devices ptr is actually an opaque handle, not a pointer.

                    // TRICKY2: use native handle as the base of the ABuffer if received one,
                    // because Widevine source only receives these base addresses.
                    info.mData = new ABuffer(ptr != NULL ? ptr : (void *)native_handle, bufSize);
                    info.mNativeHandle = NativeHandle::create(native_handle, true /* ownsHandle */);
                    info.mCodecData = info.mData;
                } else if (mQuirks & requiresAllocateBufferBit) {
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID, allottedSize);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
                }

                if (mem != NULL) {
                    info.mCodecData = new ABuffer(mem->pointer(), bufSize);
                    info.mCodecRef = mem;

                    if (type == kMetadataBufferTypeANWBuffer) {
                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                    }

                    // if we require conversion, allocate conversion buffer for client use;
                    // otherwise, reuse codec buffer
                    if (mConverter[portIndex] != NULL) {
                        CHECK_GT(conversionBufferSize, (size_t)0);
                        mem = mDealer[portIndex]->allocate(conversionBufferSize);
                        if (mem == NULL || mem->pointer() == NULL) {
                            return NO_MEMORY;
                        }
                        info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
                        info.mMemRef = mem;
                    } else {
                        info.mData = info.mCodecData;
                        info.mMemRef = info.mCodecRef;
                    }
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    // announce the allocated buffers to the client (MediaCodec)
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

status_t
ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */) {
    // Read the output port definition to learn the frame size and color
    // format the component will produce.
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // Merge the component's requested gralloc usage bits with our own
    // (protected-output flag and the common video usage bits).
    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    // Apply geometry/format/usage to the native window on the codec's behalf.
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage);
}

// Negotiates output buffer counts between the OMX component and the native
// window. On success returns (via out-params) the agreed buffer count and
// size, and the window's minimum-undequeued requirement (adjusted by the
// number of extra buffers we managed to allocate). All out-params are
// zeroed for tunneled playback, where the component manages its own buffers.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(mNativeWindow.get(), &mNativeWindowUsageBits);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // If the component rejects the count, retry with fewer extra buffers
    // before giving up entirely.
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Dequeues graphic buffers from the native window and registers each one
// with the OMX component on the output port; the window's required minimum
// of undequeued buffers is cancelled back to it at the end.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    // Allocation is only enabled for the duration of this call in
    // non-metadata mode (re-disabled below).
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf,
                &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mCodecData = info.mData;
        info.mGraphicBuffer = graphicBuffer;
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    // Decide which of the dequeued buffers must be returned to the window:
    // everything on error, otherwise only the window's required minimum of
    // undequeued buffers.
    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // preserve the first error encountered
            if (err == 0) {
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}

// Allocates small metadata buffers (VideoGrallocMetadata or
// VideoNativeMetadata structs, depending on mOutputMetadataType) on the
// output port. The actual graphic buffers are dequeued lazily; under the
// legacy-adaptive experiment they are preallocated and preregistered here.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    // One shared-memory dealer sized for all metadata structs on this port.
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // mark "no fence pending" in the metadata struct
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new ABuffer(mem->pointer(), mem->size());
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        // we use useBuffer for metadata regardless of quirks
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMX->updateGraphicBufferInMeta(
                    mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                // preserve the first error encountered
                if (err == OK) {
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // Budget of metadata buffers that may still be handed to the component.
    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Dequeues one buffer from the native window and submits it to the
// component as an output metadata buffer, decrementing the submit budget.
// Returns ERROR_IO if no buffer could be dequeued.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());
    if (mMetadataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetadataBuffersToSubmit;
    info->checkWriteFence("submitOutputMetadataBuffer");
    // fillBuffer takes over the fence fd; clear our reference to it.
    status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
    info->mFenceFd = -1;
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    return err;
}

// Waits on the given fence fd (if valid) for up to IOMX::kFenceTimeoutMs,
// logging a warning on timeout; 'dbg' names the call site for the log.
// NOTE(review): the fd is wrapped in a Fence object and not closed here --
// presumably Fence adopts ownership of it; confirm against Fence's ctor.
status_t ACodec::waitForFence(int fd, const char *dbg ) {
    status_t res = OK;
    if (fd >= 0) {
        sp<Fence> fence = new Fence(fd);
        res = fence->wait(IOMX::kFenceTimeoutMs);
        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    }
    return res;
}
// static
// Maps a BufferInfo ownership state to a short tag for log/dump output.
const char *ACodec::_asString(BufferInfo::Status s) {
    switch (s) {
        case BufferInfo::OWNED_BY_US:            return "OUR";
        case BufferInfo::OWNED_BY_COMPONENT:     return "COMPONENT";
        case BufferInfo::OWNED_BY_UPSTREAM:      return "UPSTREAM";
        case BufferInfo::OWNED_BY_DOWNSTREAM:    return "DOWNSTREAM";
        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
        case BufferInfo::UNRECOGNIZED:           return "UNRECOGNIZED";
        default:                                 return "?";
    }
}

// Logs one line per buffer on the given port: buffer ID, graphic buffer
// pointers, ownership state, and the dequeue counter at last dequeue.
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                i, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
    }
}

// Returns a buffer we own to the native window via cancelBuffer, passing
// along any pending write fence. Ownership is handed to the native window
// even if the cancel fails.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    // cancelBuffer consumes the fence fd; drop our reference to it.
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Records render-tracking info for a buffer just dequeued from the native
// window and reports any frames whose fences have already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Frame-rendered callback; dumps the tracker's render queue when the frame
// is not recognized.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}

// Collects frames whose render fences have signaled (up to 'until' if
// given), unlinks their per-buffer render info, and posts a
// kWhatOutputFramesRendered notification for them.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index <
                mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}

// Dequeues an output buffer from the native window and returns the matching
// BufferInfo with ownership transferred to us. Stale buffers (known but not
// owned by the window, or unrecognized in non-meta mode) are discarded and
// the dequeue retried. In metadata mode an unknown buffer replaces the
// least-recently-dequeued entry. Returns NULL on failure (tunneled mode,
// fatal error, or a dequeue error).
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
                " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used. We can simply ignore this as we have already dequeued
                // this buffer properly. NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible to dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    // make the component's metadata buffer point at the replacement buffer
    mOMX->updateGraphicBufferInMeta(
            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
            oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer on the given port, continuing past individual failures
// and returning the first error encountered. The port's MemoryDealer is
// cleared regardless of errors.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}

// Frees output buffers that are not currently held by the component or by a
// downstream consumer; returns the first error encountered.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t err = OK;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            status_t err2 = freeBuffer(kPortIndexOutput, i);
            if (err == OK) {
                err = err2;
            }
        }
    }

    return err;
}

// Frees the buffer at index i on the given port: warns about any fence
// still recorded in ANW metadata, cancels the buffer back to the native
// window if we own it, frees the OMX buffer, closes any pending fence fd,
// drops render-tracking state, and removes the BufferInfo entry.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMX->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}

// Linear search for the BufferInfo whose OMX buffer ID matches; optionally
// returns its index through 'index'. Returns NULL (after logging) if no
// such buffer exists on the port.
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info =
            &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    ALOGE("Could not find buffer with ID %u", bufferID);
    return NULL;
}

// Resolves the standard OMX role string for (isEncoder, mime) and applies
// it to this component. Returns BAD_VALUE for unknown mime types.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    const char *role = getComponentRole(isEncoder, mime);
    if (role == NULL) {
        return BAD_VALUE;
    }
    status_t err = setComponentRole(mOMX, mNode, role);
    if (err != OK) {
        ALOGW("[%s] Failed to set standard component role '%s'.",
                mComponentName.c_str(), role);
    }
    return err;
}

//static
// Maps a mime type to its standard OMX decoder/encoder role string via a
// case-insensitive table lookup; returns NULL for unknown mime types.
const char *ACodec::getComponentRole(
        bool isEncoder, const char *mime) {
    struct MimeToRole {
        const char *mime;
        const char *decoderRole;
        const char *encoderRole;
    };

    static const MimeToRole kMimeToRole[] = {
        { MEDIA_MIMETYPE_AUDIO_MPEG,
            "audio_decoder.mp3", "audio_encoder.mp3" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
            "audio_decoder.mp1", "audio_encoder.mp1" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
            "audio_decoder.mp2", "audio_encoder.mp2" },
        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
            "audio_decoder.amrnb", "audio_encoder.amrnb" },
        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
            "audio_decoder.amrwb", "audio_encoder.amrwb" },
        { MEDIA_MIMETYPE_AUDIO_AAC,
            "audio_decoder.aac", "audio_encoder.aac" },
        { MEDIA_MIMETYPE_AUDIO_VORBIS,
            "audio_decoder.vorbis", "audio_encoder.vorbis" },
        { MEDIA_MIMETYPE_AUDIO_OPUS,
            "audio_decoder.opus", "audio_encoder.opus" },
        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
        { MEDIA_MIMETYPE_VIDEO_AVC,
            "video_decoder.avc", "video_encoder.avc" },
        { MEDIA_MIMETYPE_VIDEO_HEVC,
            "video_decoder.hevc", "video_encoder.hevc" },
        {
            MEDIA_MIMETYPE_VIDEO_MPEG4,
            "video_decoder.mpeg4", "video_encoder.mpeg4" },
        { MEDIA_MIMETYPE_VIDEO_H263,
            "video_decoder.h263", "video_encoder.h263" },
        { MEDIA_MIMETYPE_VIDEO_VP8,
            "video_decoder.vp8", "video_encoder.vp8" },
        { MEDIA_MIMETYPE_VIDEO_VP9,
            "video_decoder.vp9", "video_encoder.vp9" },
        { MEDIA_MIMETYPE_AUDIO_RAW,
            "audio_decoder.raw", "audio_encoder.raw" },
        { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
            "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
        { MEDIA_MIMETYPE_AUDIO_FLAC,
            "audio_decoder.flac", "audio_encoder.flac" },
        { MEDIA_MIMETYPE_AUDIO_MSGSM,
            "audio_decoder.gsm", "audio_encoder.gsm" },
        { MEDIA_MIMETYPE_VIDEO_MPEG2,
            "video_decoder.mpeg2", "video_encoder.mpeg2" },
        { MEDIA_MIMETYPE_AUDIO_AC3,
            "audio_decoder.ac3", "audio_encoder.ac3" },
        { MEDIA_MIMETYPE_AUDIO_EAC3,
            "audio_decoder.eac3", "audio_encoder.eac3" },
    };

    static const size_t kNumMimeToRole =
        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);

    // case-insensitive match against the table
    size_t i;
    for (i = 0; i < kNumMimeToRole; ++i) {
        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
            break;
        }
    }

    if (i == kNumMimeToRole) {
        return NULL;
    }

    return isEncoder ? kMimeToRole[i].encoderRole
                     : kMimeToRole[i].decoderRole;
}

//static
// Applies the given role string to the node through
// OMX_IndexParamStandardComponentRole.
status_t ACodec::setComponentRole(
        const sp<IOMX> &omx, IOMX::node_id node, const char *role) {
    OMX_PARAM_COMPONENTROLETYPE roleParams;
    InitOMXParams(&roleParams);

    strncpy((char *)roleParams.cRole,
            role, OMX_MAX_STRINGNAME_SIZE - 1);

    // strncpy does not guarantee NUL termination; force it.
    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';

    return omx->setParameter(
            node, OMX_IndexParamStandardComponentRole,
            &roleParams, sizeof(roleParams));
}

// Applies the session configuration (as built by MediaCodec::configure) to
// the component: component role, input/output metadata modes, encoder
// parameters, and (further below) native-window setup for video decoders.
status_t ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    int32_t encoder;
    if (!msg->findInt32("encoder", &encoder)) {
        encoder = false;
    }

    sp<AMessage> inputFormat = new AMessage;
    sp<AMessage> outputFormat = new AMessage;
    mConfigFormat = msg;

    mIsEncoder = encoder;

    // metadata modes are re-established below as configuration demands
    mInputMetadataType = kMetadataBufferTypeInvalid;
    mOutputMetadataType = kMetadataBufferTypeInvalid;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);

    if (err != OK) {
        return err;
    }

    int32_t bitRate = 0;
    // FLAC encoder doesn't need a bitrate, other encoders do
    if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
            && !msg->findInt32("bitrate", &bitRate)) {
        return INVALID_OPERATION;
    }

    int32_t storeMeta;
    if (encoder
            && msg->findInt32("android._input-metadata-buffer-type", &storeMeta)
            && storeMeta != kMetadataBufferTypeInvalid) {
        mInputMetadataType = (MetadataBufferType)storeMeta;
        err = mOMX->storeMetaDataInBuffers(
                mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);
        if (err != OK) {
            ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
                    mComponentName.c_str(), err);

            return err;
        } else if (storeMeta == kMetadataBufferTypeANWBuffer
                && mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            // IOMX
translates ANWBuffers to gralloc source already. 1742 mInputMetadataType = (MetadataBufferType)storeMeta; 1743 } 1744 1745 uint32_t usageBits; 1746 if (mOMX->getParameter( 1747 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1748 &usageBits, sizeof(usageBits)) == OK) { 1749 inputFormat->setInt32( 1750 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1751 } 1752 } 1753 1754 int32_t prependSPSPPS = 0; 1755 if (encoder 1756 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1757 && prependSPSPPS != 0) { 1758 OMX_INDEXTYPE index; 1759 err = mOMX->getExtensionIndex( 1760 mNode, 1761 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1762 &index); 1763 1764 if (err == OK) { 1765 PrependSPSPPSToIDRFramesParams params; 1766 InitOMXParams(¶ms); 1767 params.bEnable = OMX_TRUE; 1768 1769 err = mOMX->setParameter( 1770 mNode, index, ¶ms, sizeof(params)); 1771 } 1772 1773 if (err != OK) { 1774 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1775 "IDR frames. (err %d)", err); 1776 1777 return err; 1778 } 1779 } 1780 1781 // Only enable metadata mode on encoder output if encoder can prepend 1782 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1783 // opaque handle, to which we don't have access. 
1784 int32_t video = !strncasecmp(mime, "video/", 6); 1785 mIsVideo = video; 1786 if (encoder && video) { 1787 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1788 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1789 && storeMeta != 0); 1790 1791 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1792 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1793 if (err != OK) { 1794 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1795 mComponentName.c_str(), err); 1796 } 1797 1798 if (!msg->findInt64( 1799 "repeat-previous-frame-after", 1800 &mRepeatFrameDelayUs)) { 1801 mRepeatFrameDelayUs = -1ll; 1802 } 1803 1804 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1805 mMaxPtsGapUs = -1ll; 1806 } 1807 1808 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1809 mMaxFps = -1; 1810 } 1811 1812 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1813 mTimePerCaptureUs = -1ll; 1814 } 1815 1816 if (!msg->findInt32( 1817 "create-input-buffers-suspended", 1818 (int32_t*)&mCreateInputBuffersSuspended)) { 1819 mCreateInputBuffersSuspended = false; 1820 } 1821 } 1822 1823 // NOTE: we only use native window for video decoders 1824 sp<RefBase> obj; 1825 bool haveNativeWindow = msg->findObject("native-window", &obj) 1826 && obj != NULL && video && !encoder; 1827 mUsingNativeWindow = haveNativeWindow; 1828 mLegacyAdaptiveExperiment = false; 1829 if (video && !encoder) { 1830 inputFormat->setInt32("adaptive-playback", false); 1831 1832 int32_t usageProtected; 1833 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1834 if (!haveNativeWindow) { 1835 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1836 return PERMISSION_DENIED; 1837 } 1838 mFlags |= kFlagIsGrallocUsageProtected; 1839 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1840 } 1841 1842 if (mFlags & kFlagIsSecure) { 1843 // use native_handles for secure input buffers 
1844 err = mOMX->enableNativeBuffers( 1845 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1846 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1847 err = OK; // ignore error for now 1848 } 1849 } 1850 if (haveNativeWindow) { 1851 sp<ANativeWindow> nativeWindow = 1852 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1853 1854 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1855 int32_t autoFrc; 1856 if (msg->findInt32("auto-frc", &autoFrc)) { 1857 bool enabled = autoFrc; 1858 OMX_CONFIG_BOOLEANTYPE config; 1859 InitOMXParams(&config); 1860 config.bEnabled = (OMX_BOOL)enabled; 1861 status_t temp = mOMX->setConfig( 1862 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1863 &config, sizeof(config)); 1864 if (temp == OK) { 1865 outputFormat->setInt32("auto-frc", enabled); 1866 } else if (enabled) { 1867 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1868 } 1869 } 1870 // END of temporary support for automatic FRC 1871 1872 int32_t tunneled; 1873 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1874 tunneled != 0) { 1875 ALOGI("Configuring TUNNELED video playback."); 1876 mTunneled = true; 1877 1878 int32_t audioHwSync = 0; 1879 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1880 ALOGW("No Audio HW Sync provided for video tunnel"); 1881 } 1882 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1883 if (err != OK) { 1884 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1885 audioHwSync, nativeWindow.get()); 1886 return err; 1887 } 1888 1889 int32_t maxWidth = 0, maxHeight = 0; 1890 if (msg->findInt32("max-width", &maxWidth) && 1891 msg->findInt32("max-height", &maxHeight)) { 1892 1893 err = mOMX->prepareForAdaptivePlayback( 1894 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1895 if (err != OK) { 1896 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1897 mComponentName.c_str(), err); 1898 // allow failure 1899 err = OK; 
1900 } else { 1901 inputFormat->setInt32("max-width", maxWidth); 1902 inputFormat->setInt32("max-height", maxHeight); 1903 inputFormat->setInt32("adaptive-playback", true); 1904 } 1905 } 1906 } else { 1907 ALOGV("Configuring CPU controlled video playback."); 1908 mTunneled = false; 1909 1910 // Explicity reset the sideband handle of the window for 1911 // non-tunneled video in case the window was previously used 1912 // for a tunneled video playback. 1913 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1914 if (err != OK) { 1915 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1916 return err; 1917 } 1918 1919 // Always try to enable dynamic output buffers on native surface 1920 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1921 err = mOMX->storeMetaDataInBuffers( 1922 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1923 if (err != OK) { 1924 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1925 mComponentName.c_str(), err); 1926 1927 // if adaptive playback has been requested, try JB fallback 1928 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1929 // LARGE MEMORY REQUIREMENT 1930 1931 // we will not do adaptive playback on software accessed 1932 // surfaces as they never had to respond to changes in the 1933 // crop window, and we don't trust that they will be able to. 
1934 int usageBits = 0; 1935 bool canDoAdaptivePlayback; 1936 1937 if (nativeWindow->query( 1938 nativeWindow.get(), 1939 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1940 &usageBits) != OK) { 1941 canDoAdaptivePlayback = false; 1942 } else { 1943 canDoAdaptivePlayback = 1944 (usageBits & 1945 (GRALLOC_USAGE_SW_READ_MASK | 1946 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1947 } 1948 1949 int32_t maxWidth = 0, maxHeight = 0; 1950 if (canDoAdaptivePlayback && 1951 msg->findInt32("max-width", &maxWidth) && 1952 msg->findInt32("max-height", &maxHeight)) { 1953 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1954 mComponentName.c_str(), maxWidth, maxHeight); 1955 1956 err = mOMX->prepareForAdaptivePlayback( 1957 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1958 maxHeight); 1959 ALOGW_IF(err != OK, 1960 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1961 mComponentName.c_str(), err); 1962 1963 if (err == OK) { 1964 inputFormat->setInt32("max-width", maxWidth); 1965 inputFormat->setInt32("max-height", maxHeight); 1966 inputFormat->setInt32("adaptive-playback", true); 1967 } 1968 } 1969 // allow failure 1970 err = OK; 1971 } else { 1972 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1973 mComponentName.c_str()); 1974 CHECK(storingMetadataInDecodedBuffers()); 1975 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1976 "legacy-adaptive", !msg->contains("no-experiments")); 1977 1978 inputFormat->setInt32("adaptive-playback", true); 1979 } 1980 1981 int32_t push; 1982 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1983 && push != 0) { 1984 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1985 } 1986 } 1987 1988 int32_t rotationDegrees; 1989 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1990 mRotationDegrees = rotationDegrees; 1991 } else { 1992 mRotationDegrees = 0; 1993 } 1994 } 1995 1996 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 1997 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 1998 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 1999 2000 if (video) { 2001 // determine need for software renderer 2002 bool usingSwRenderer = false; 2003 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 2004 usingSwRenderer = true; 2005 haveNativeWindow = false; 2006 } 2007 2008 if (encoder) { 2009 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2010 } else { 2011 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2012 } 2013 2014 if (err != OK) { 2015 return err; 2016 } 2017 2018 if (haveNativeWindow) { 2019 mNativeWindow = static_cast<Surface *>(obj.get()); 2020 } 2021 2022 // initialize native window now to get actual output format 2023 // TODO: this is needed for some encoders even though they don't use native window 2024 err = initNativeWindow(); 2025 if (err != OK) { 2026 return err; 2027 } 2028 2029 // fallback for devices that do not handle flex-YUV for native buffers 2030 if (haveNativeWindow) { 2031 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2032 if (msg->findInt32("color-format", &requestedColorFormat) && 2033 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2034 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2035 if (err != OK) { 2036 return err; 2037 } 2038 int32_t colorFormat = OMX_COLOR_FormatUnused; 2039 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2040 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2041 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2042 return BAD_VALUE; 2043 } 2044 ALOGD("[%s] Requested output format %#x and got %#x.", 2045 mComponentName.c_str(), requestedColorFormat, colorFormat); 2046 if (!isFlexibleColorFormat( 2047 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2048 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2049 // device did not handle flex-YUV request for native window, fall back 2050 // to SW renderer 2051 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2052 mNativeWindow.clear(); 2053 mNativeWindowUsageBits = 0; 2054 haveNativeWindow = false; 2055 usingSwRenderer = true; 2056 if (storingMetadataInDecodedBuffers()) { 2057 err = mOMX->storeMetaDataInBuffers( 2058 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2059 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2060 // TODO: implement adaptive-playback support for bytebuffer mode. 2061 // This is done by SW codecs, but most HW codecs don't support it. 2062 inputFormat->setInt32("adaptive-playback", false); 2063 } 2064 if (err == OK) { 2065 err = mOMX->enableNativeBuffers( 2066 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2067 } 2068 if (mFlags & kFlagIsGrallocUsageProtected) { 2069 // fallback is not supported for protected playback 2070 err = PERMISSION_DENIED; 2071 } else if (err == OK) { 2072 err = setupVideoDecoder( 2073 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2074 } 2075 } 2076 } 2077 } 2078 2079 if (usingSwRenderer) { 2080 outputFormat->setInt32("using-sw-renderer", 1); 2081 } 2082 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2083 int32_t numChannels, sampleRate; 2084 if (!msg->findInt32("channel-count", &numChannels) 2085 || !msg->findInt32("sample-rate", &sampleRate)) { 2086 // Since we did not always check for these, leave them optional 2087 // and have the decoder figure it all out. 2088 err = OK; 2089 } else { 2090 err = setupRawAudioFormat( 2091 encoder ? 
kPortIndexInput : kPortIndexOutput, 2092 sampleRate, 2093 numChannels); 2094 } 2095 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2096 int32_t numChannels, sampleRate; 2097 if (!msg->findInt32("channel-count", &numChannels) 2098 || !msg->findInt32("sample-rate", &sampleRate)) { 2099 err = INVALID_OPERATION; 2100 } else { 2101 int32_t isADTS, aacProfile; 2102 int32_t sbrMode; 2103 int32_t maxOutputChannelCount; 2104 int32_t pcmLimiterEnable; 2105 drcParams_t drc; 2106 if (!msg->findInt32("is-adts", &isADTS)) { 2107 isADTS = 0; 2108 } 2109 if (!msg->findInt32("aac-profile", &aacProfile)) { 2110 aacProfile = OMX_AUDIO_AACObjectNull; 2111 } 2112 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2113 sbrMode = -1; 2114 } 2115 2116 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2117 maxOutputChannelCount = -1; 2118 } 2119 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2120 // value is unknown 2121 pcmLimiterEnable = -1; 2122 } 2123 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2124 // value is unknown 2125 drc.encodedTargetLevel = -1; 2126 } 2127 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2128 // value is unknown 2129 drc.drcCut = -1; 2130 } 2131 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2132 // value is unknown 2133 drc.drcBoost = -1; 2134 } 2135 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2136 // value is unknown 2137 drc.heavyCompression = -1; 2138 } 2139 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2140 // value is unknown 2141 drc.targetRefLevel = -1; 2142 } 2143 2144 err = setupAACCodec( 2145 encoder, numChannels, sampleRate, bitRate, aacProfile, 2146 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2147 pcmLimiterEnable); 2148 } 2149 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2150 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2151 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2152 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2153 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2154 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2155 // These are PCM-like formats with a fixed sample rate but 2156 // a variable number of channels. 2157 2158 int32_t numChannels; 2159 if (!msg->findInt32("channel-count", &numChannels)) { 2160 err = INVALID_OPERATION; 2161 } else { 2162 int32_t sampleRate; 2163 if (!msg->findInt32("sample-rate", &sampleRate)) { 2164 sampleRate = 8000; 2165 } 2166 err = setupG711Codec(encoder, sampleRate, numChannels); 2167 } 2168 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2169 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2170 if (encoder && 2171 (!msg->findInt32("channel-count", &numChannels) 2172 || !msg->findInt32("sample-rate", &sampleRate))) { 2173 ALOGE("missing channel count or sample rate for FLAC encoder"); 2174 err = INVALID_OPERATION; 2175 } else { 2176 if (encoder) { 2177 if (!msg->findInt32( 2178 "complexity", &compressionLevel) && 2179 !msg->findInt32( 2180 "flac-compression-level", &compressionLevel)) { 2181 compressionLevel = 5; // default FLAC compression level 2182 } else if (compressionLevel < 0) { 2183 ALOGW("compression level %d outside [0..8] range, " 2184 "using 0", 2185 compressionLevel); 2186 compressionLevel = 0; 2187 } else if (compressionLevel > 8) { 2188 ALOGW("compression level %d outside [0..8] range, " 2189 "using 8", 2190 compressionLevel); 2191 compressionLevel = 8; 2192 } 2193 } 2194 err = setupFlacCodec( 2195 encoder, numChannels, sampleRate, compressionLevel); 2196 } 2197 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2198 int32_t numChannels, sampleRate; 2199 if (encoder 2200 || !msg->findInt32("channel-count", &numChannels) 2201 || !msg->findInt32("sample-rate", &sampleRate)) { 2202 err = INVALID_OPERATION; 2203 } else { 2204 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2205 } 2206 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2207 int32_t numChannels; 2208 int32_t sampleRate; 2209 if (!msg->findInt32("channel-count", &numChannels) 2210 || !msg->findInt32("sample-rate", &sampleRate)) { 2211 err = INVALID_OPERATION; 2212 } else { 2213 err = setupAC3Codec(encoder, numChannels, sampleRate); 2214 } 2215 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2216 int32_t numChannels; 2217 int32_t sampleRate; 2218 if (!msg->findInt32("channel-count", &numChannels) 2219 || !msg->findInt32("sample-rate", &sampleRate)) { 2220 err = INVALID_OPERATION; 2221 } else { 2222 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2223 } 2224 } 2225 2226 if (err != OK) { 2227 return err; 2228 } 2229 2230 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2231 mEncoderDelay = 0; 2232 } 2233 2234 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2235 mEncoderPadding = 0; 2236 } 2237 2238 if (msg->findInt32("channel-mask", &mChannelMask)) { 2239 mChannelMaskPresent = true; 2240 } else { 2241 mChannelMaskPresent = false; 2242 } 2243 2244 int32_t maxInputSize; 2245 if (msg->findInt32("max-input-size", &maxInputSize)) { 2246 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2247 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2248 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2249 } 2250 2251 int32_t priority; 2252 if (msg->findInt32("priority", &priority)) { 2253 err = setPriority(priority); 2254 } 2255 2256 int32_t rateInt = -1; 2257 float rateFloat = -1; 2258 if (!msg->findFloat("operating-rate", &rateFloat)) { 2259 msg->findInt32("operating-rate", &rateInt); 2260 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
    }
    if (rateFloat > 0) {
        err = setOperatingRate(rateFloat, video);
    }

    // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
    mBaseOutputFormat = outputFormat;
    // trigger a kWhatOutputFormatChanged msg on first buffer
    mLastOutputFormat.clear();

    err = getPortFormat(kPortIndexInput, inputFormat);
    if (err == OK) {
        err = getPortFormat(kPortIndexOutput, outputFormat);
        if (err == OK) {
            mInputFormat = inputFormat;
            mOutputFormat = outputFormat;
        }
    }

    // create data converters if needed
    if (!video && err == OK) {
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        if (encoder) {
            // encoder: convert from the client's requested PCM encoding into
            // whatever encoding the codec's input port actually consumes
            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
            if (mConverter[kPortIndexInput] != NULL) {
                // advertise the client-facing encoding, not the codec-facing one
                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        } else {
            // decoder: conversion runs the other way -- from the codec's output
            // encoding into the client's requested PCM encoding
            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
            if (mConverter[kPortIndexOutput] != NULL) {
                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        }
    }

    return err;
}

// Requests a scheduling priority for the codec via the Android extension
// OMX_IndexConfigPriority config. A negative priority is rejected, but a
// component that does not support the config is tolerated: the failure is
// only logged and OK is returned.
status_t ACodec::setPriority(int32_t priority) {
    if (priority < 0) {
        return BAD_VALUE;
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = (OMX_U32)priority;
    status_t temp = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
            &config, sizeof(config));
    if (temp != OK) {
        ALOGI("codec does not support config priority (err %d)", temp);
    }
    return OK;
}

// Communicates the desired operating rate to the codec via
// OMX_IndexConfigOperatingRate. For video the rate is a frame rate that is
// passed in Q16 fixed point (hence the 65535 upper bound before scaling);
// for audio it is a sample rate passed as a plain integer. As with
// setPriority(), lack of component support is only logged, not treated as
// an error.
status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
    if (rateFloat < 0) {
        return BAD_VALUE;
    }
    OMX_U32 rate;
    if (isVideo) {
        if (rateFloat > 65535) {
            return BAD_VALUE;
        }
        // convert frames-per-second to Q16 fixed point
        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
    } else {
        if (rateFloat > UINT_MAX) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat);
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = rate;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
            &config, sizeof(config));
    if (err != OK) {
        ALOGI("codec does not support config operating rate (err %d)", err);
    }
    return OK;
}

// Queries the encoder's current intra-refresh period. Tries the
// Android-specific OMX_IndexConfigAndroidIntraRefresh config first and, if
// that is unsupported, falls back to the standard cyclic intra-refresh
// parameter, converting its macroblocks-per-refresh count back into a
// period using the output port's frame dimensions.
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        // no cyclic refresh configured (or query unsupported) -- report "disabled"
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);

    return OK;
}

// Sets the intra-refresh period. Prefers the Android-specific
// OMX_IndexConfigAndroidIntraRefresh config; if the component does not
// support it and we are still in the configure phase (inConfigure), falls
// back to the standard cyclic intra-refresh parameter by converting the
// period into a macroblocks-per-refresh count derived from the output
// port's frame dimensions. Outside of configure a setParameter call is not
// permitted, so the fallback is unavailable and INVALID_OPERATION is
// returned.
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
    if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}

// Raises the port's buffer size to at least |size| (no-op if the current
// port definition is already large enough), then reads the definition back
// to verify the component actually honored the request.
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize >= size) {
        return OK;
    }

    def.nBufferSize = size;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // read back to confirm the component accepted the new size
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize < size) {
        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
        return FAILED_TRANSACTION;
    }

    return OK;
}

// Walks the port's supported audio formats (up to kMaxIndicesToCheck
// entries) looking for |desiredFormat| and, when found, selects it via
// setParameter. Returns ERROR_UNSUPPORTED if the format is not in the
// component's enumeration.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            break;
        }

        if (index == kMaxIndicesToCheck) {
            // exhausted the enumeration without a match -- give up
            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eEncoding), format.eEncoding);
            return ERROR_UNSUPPORTED;
        }
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}

// Configures the component for AAC encode or decode.
// Encoders: raw PCM on the input port, AAC on the output port, with
// profile/bitrate/channel-mode/SBR settings applied to the output port.
// Decoders: AAC (optionally in ADTS framing) on the input port, raw PCM on
// the output port; DRC/presentation parameters are best-effort and do not
// fail the configuration if the component rejects them.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        // ADTS framing is only supported for decode
        return -EINVAL;
    }

    // set up the PCM side of the codec (input for encoders, output for decoders)
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // sbrMode: 0 = off, 1 = single-rate, 2 = dual-rate, -1 = codec decides
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // decoder path: configure the AAC input port
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // presentation parameters (DRC, output channel limit, PCM limiter);
    // a field value of -1 means the caller did not know the value
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    mSampleRate = sampleRate;
    return res;
}

// Configures the component for AC3 decode (encode is unsupported): raw PCM
// on the output port, AC3 channel/sample-rate parameters on the input port.
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ?
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("AC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));
}

// Configures the component for EAC3 decode (encode is unsupported);
// mirrors setupAC3Codec() but uses the EAC3 parameter struct and index.
status_t ACodec::setupEAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("EAC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));
}

// Maps a requested bitrate (bits per second) to an AMR band mode: the
// lowest mode whose nominal rate is at least |bps|, capped at the highest
// mode (23.85 kbps for AMR-WB, 12.2 kbps for AMR-NB).
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
        bool isAMRWB, int32_t bps) {
    if (isAMRWB) {
        if (bps <= 6600) {
            return OMX_AUDIO_AMRBandModeWB0;
        } else if (bps <= 8850) {
            return OMX_AUDIO_AMRBandModeWB1;
        } else if (bps <= 12650) {
            return OMX_AUDIO_AMRBandModeWB2;
        } else if (bps <= 14250) {
            return OMX_AUDIO_AMRBandModeWB3;
        } else if (bps <= 15850) {
            return OMX_AUDIO_AMRBandModeWB4;
        } else if (bps <= 18250) {
            return OMX_AUDIO_AMRBandModeWB5;
        } else if (bps <= 19850) {
            return OMX_AUDIO_AMRBandModeWB6;
        } else if (bps <= 23050) {
            return OMX_AUDIO_AMRBandModeWB7;
        }

        // 23850 bps
        return OMX_AUDIO_AMRBandModeWB8;
    } else {  // AMRNB
        if (bps <= 4750) {
            return OMX_AUDIO_AMRBandModeNB0;
        } else if (bps <= 5150) {
            return OMX_AUDIO_AMRBandModeNB1;
        } else if (bps <= 5900) {
            return OMX_AUDIO_AMRBandModeNB2;
        } else if (bps <= 6700) {
            return OMX_AUDIO_AMRBandModeNB3;
        } else if (bps <= 7400) {
            return OMX_AUDIO_AMRBandModeNB4;
        } else if (bps <= 7950) {
            return OMX_AUDIO_AMRBandModeNB5;
        } else if (bps <= 10200) {
            return OMX_AUDIO_AMRBandModeNB6;
        }

        // 12200 bps
        return OMX_AUDIO_AMRBandModeNB7;
    }
}

// Configures AMR-NB or AMR-WB encode/decode. The band mode is derived from
// the requested bitrate; the raw PCM side (input for encoders, output for
// decoders) is fixed at mono, 8 kHz for narrowband and 16 kHz for wideband.
status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
    OMX_AUDIO_PARAM_AMRTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;

    status_t err =
        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            isWAMR ?
16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// Configures G.711 (A-law / mu-law) decode; encoding is not supported.
// Note the raw PCM parameters are applied to the input port here.
status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
    if (encoder) {
        return INVALID_OPERATION;
    }

    return setupRawAudioFormat(
            kPortIndexInput, sampleRate, numChannels);
}

// Configures FLAC encode or decode. Encoders additionally get a
// compression level (0..8, already clamped by the caller) applied via
// OMX_IndexParamAudioFlac; the raw PCM side (input for encoders, output
// for decoders) is set up afterwards.
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures raw PCM on the given port: sample rate, channel count and
// sample encoding (8-bit unsigned, 16-bit signed, or 32-bit float). If the
// component rejects a non-16-bit encoding, retries with 16-bit signed PCM.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // NOTE(review): OMX_ErrorNone == 0 == OK, so a failed channel mapping
        // silently reports success while the PCM params below are never
        // applied -- confirm this best-effort behavior is intentional.
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err !=
OK && encoding != kAudioEncodingPcm16bit) {
        // fall back to 16-bit signed PCM
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Enables tunneled video playback: asks the component to enter video
// tunnel mode tied to the given audio HW sync id, then attaches the
// returned sideband handle to the native window as a sideband stream.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Selects a video port format matching the requested compression and color
// format by enumerating the component's supported formats (up to
// kMaxIndicesToCheck entries) and applying the first match. Flexible YUV
// color formats are substituted by the codec-supported equivalent, and a
// workaround is included for TI components that report inconsistent
// compression/color pairs.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
                && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
//
// Suggested preference order:
// - optimal format for texture rendering (mediaplayer behavior)
// - optimal SW readable & texture renderable format (flex-YUV support)
// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
// - legacy "usable" standard formats
//
// For legacy support, we prefer a standard format, but will settle for a SW readable
// flex-YUV format.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            return OMX_ErrorBadParameter;
        }
        if (!getLegacyFlexibleFormat) {
            // take the component's first (most preferred) uncompressed format
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}

// Bidirectional mime-type <-> OMX video coding lookup table used by the two
// helpers below.
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
    { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision },
};

// Case-insensitive mime -> OMX coding type; *codingType is set to
// OMX_VIDEO_CodingUnused and ERROR_UNSUPPORTED returned on no match.
static status_t GetVideoCodingTypeFromMime(
        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
            return OK;
        }
    }

    *codingType = OMX_VIDEO_CodingUnused;

    return ERROR_UNSUPPORTED;
}

// Reverse lookup: OMX coding type -> mime; clears *mime and returns
// ERROR_UNSUPPORTED on no match.
static status_t GetMimeTypeForVideoCoding(
        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
            *mime = kVideoCodingMapEntry[i].mMime;
            return OK;
        }
    }

    mime->clear();

    return ERROR_UNSUPPORTED;
}

// Full decoder port setup: input compression format, output color format
// (falling back to setSupportedOutputFormat when the requested one is
// rejected), frame dimensions/rate on both ports, then the optional color
// aspects and HDR static info extensions.
status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
        bool usingSwRenderer, sp<AMessage> &outputFormat) {
    int32_t width, height;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CODINGTYPE compressionFormat;
    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    if (compressionFormat == OMX_VIDEO_CodingVP9) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexInput;
        // Check if VP9 decoder advertises supported profiles.
        params.nProfileIndex = 0;
        // NOTE: this inner err deliberately shadows the outer one; only the
        // probe result feeds mIsLegacyVP9Decoder, a failure is not fatal.
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
                sizeof(params));
        mIsLegacyVP9Decoder = err != OK;
    }

    err = setVideoPortFormatType(
            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        return err;
    }

    int32_t tmp;
    if (msg->findInt32("color-format", &tmp)) {
        OMX_COLOR_FORMATTYPE colorFormat =
            static_cast<OMX_COLOR_FORMATTYPE>(tmp);
        err = setVideoPortFormatType(
                kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
        if (err != OK) {
            ALOGW("[%s] does not support color format %d",
                  mComponentName.c_str(), colorFormat);
            err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
        }
    } else {
        err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
    }

    if (err != OK) {
        return err;
    }

    int32_t frameRateInt;
    float frameRateFloat;
    if (!msg->findFloat("frame-rate", &frameRateFloat)) {
        if (!msg->findInt32("frame-rate", &frameRateInt)) {
            frameRateInt = -1;  // unknown; passed through as -1.0f
        }
        frameRateFloat = (float)frameRateInt;
    }

    err = setVideoFormatOnPort(
            kPortIndexInput, width, height, compressionFormat, frameRateFloat);

    if (err != OK) {
        return err;
    }

    err = setVideoFormatOnPort(
            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);

    if (err != OK) {
        return err;
    }

    err = setColorAspectsForVideoDecoder(
            width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }
    return err;
}

// Caches the vendor-extension index for the color-aspects config; on failure
// the index is zeroed so subsequent get/setConfig calls are skipped.
status_t ACodec::initDescribeColorAspectsIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
    if (err != OK) {
        mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes |params| to the codec via the describeColorAspects extension
// (ERROR_UNSUPPORTED if the extension was never resolved); optionally reads
// the values back so the caller can see what the codec actually accepted.
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));

    if (verify && err == OK) {
        err = getCodecColorAspects(params);
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
            "[%s] setting color aspects failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Decoder-side color-aspects setup during configure(): seeds aspects from the
// config format (defaulting by video size for surface output) and forwards
// them to the codec.
status_t ACodec::setColorAspectsForVideoDecoder(
        int32_t width, int32_t height, bool usingNativeWindow,
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (usingNativeWindow) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
        // The default aspects will be set back to the output format during the
        // getFormat phase of configure(). Set non-Unspecified values back into the
        // format, in case component does not support this enumeration.
        setColorAspectsIntoFormat(params.sAspects, outputFormat);
    }

    (void)initDescribeColorAspectsIndex();

    // communicate color aspects to codec
    return setCodecColorAspects(params);
}

// Reads the codec's current color aspects (and, if requested via
// bRequestingDataSpace, its preferred dataspace) through the extension.
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    // dataspace requests are allowed to be unsupported; only warn otherwise
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}

// Queries the encoder's input-port color aspects and, when supported, forces
// them into |format| so the app sees what the codec will consume.
status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    status_t err = getCodecColorAspects(params);
    if (err == OK) {
        // we only set encoder input aspects if codec supports them
        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
    }
    return err;
}

// Resolves the dataspace for the given aspects: optionally asks the codec
// first (ignoring ERROR_UNSUPPORTED), then falls back to the legacy mapping
// from ColorAspects. *dataSpace is always written.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
          "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}


// Decoder color-aspects negotiation with optional dataspace resolution:
// writes the negotiated aspects into |outputFormat|; |dataSpace| may be NULL
// when the caller does not need a dataspace (bytebuffer output).
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}

// initial video encoder setup for bytebuffer mode
status_t ACodec::setColorAspectsForVideoEncoder(
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    // copy config to output format as this is not exposed via getFormat
    copyColorConfig(configFormat, outputFormat);

    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    getColorAspectsFromFormat(configFormat, params.sAspects);

    (void)initDescribeColorAspectsIndex();

    int32_t usingRecorder;
    if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
        android_dataspace dataSpace = HAL_DATASPACE_BT709;
        int32_t width, height;
        if (configFormat->findInt32("width", &width)
                && configFormat->findInt32("height", &height)) {
            setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
            status_t err = getDataSpace(
                    params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
            if (err != OK) {
                return err;
            }
            setColorAspectsIntoFormat(params.sAspects, outputFormat);
        }
        inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
    }

    // communicate color aspects to codec, but do not allow change of the platform aspects
    ColorAspects origAspects = params.sAspects;
    for (int triesLeft = 2; --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK
                || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
                        params.sAspects, origAspects, true /* usePlatformAspects */)) {
            return err;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }
    return OK;
}

// Seeds HDR static info from the config format (mirroring it into
// |outputFormat|) and forwards it to the codec via the HDR extension.
status_t ACodec::setHDRStaticInfoForVideoCodec(
        OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    HDRStaticInfo *info = &params.sInfo;
    if (getHDRStaticInfoFromFormat(configFormat, info)) {
        setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
    }

    (void)initDescribeHDRStaticInfoIndex();

    // communicate HDR static Info to codec
    return setHDRStaticInfo(params);
}

// subsequent initial video encoder setup for surface mode
status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
        android_dataspace *dataSpace /* nonnull */) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    ColorAspects &aspects = params.sAspects;

    // reset default format and store resulting format into both input and output formats
    getColorAspectsFromFormat(mConfigFormat, aspects);
    int32_t width, height;
    if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) {
        setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
    }
    setColorAspectsIntoFormat(aspects, mInputFormat);
    setColorAspectsIntoFormat(aspects, mOutputFormat);

    // communicate color aspects to codec, but do not allow any change
    ColorAspects origAspects = aspects;
    status_t err = OK;
    for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) {
        // NOTE(review): this inner err shadows the outer one, so the outer err
        // stays OK regardless of the setConfig result and the tryCodec decision
        // below only depends on mDescribeColorAspectsIndex -- confirm intended.
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) {
            break;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }

    *dataSpace = HAL_DATASPACE_BT709;
    aspects = origAspects; // restore desired color aspects
    status_t res = getDataSpace(
            params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */);
    if (err == OK) {
        err = res;
    }
    mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
    mInputFormat->setBuffer(
            "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));

    // update input format with codec supported color aspects (basically set unsupported
    // aspects to Unspecified)
    if (err == OK) {
        (void)getInputColorAspectsForVideoEncoder(mInputFormat);
    }

    ALOGV("set default color aspects, updated input format to %s, output format to %s",
            mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());

    return err;
}

// Reads HDR static info from the codec and, when supported, stores it into
// |format|.
status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    status_t err = getHDRStaticInfo(params);
    if (err == OK) {
        // we only set decoder output HDRStaticInfo if codec supports them
        setHDRStaticInfoIntoFormat(params.sInfo, format);
    }
    return err;
}

// Caches the vendor-extension index for HDR static info; zeroed on failure so
// later get/setConfig calls become no-ops returning ERROR_UNSUPPORTED.
status_t ACodec::initDescribeHDRStaticInfoIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
    if (err != OK) {
        mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes HDR static metadata to the codec via the cached extension index;
// ERROR_UNSUPPORTED if the extension is unavailable.
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    const HDRStaticInfo *info = &params.sInfo;
    ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, "
            "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
            mComponentName.c_str(),
            info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y,
            info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
            info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
            info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] setting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Reads HDR static metadata from the codec via the cached extension index.
status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] getting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Full encoder setup: input port (color format, dimensions, stride/slice,
// frame rate, buffer size), output port (compression format, bitrate),
// optional intra-refresh, per-codec parameters, then color aspects and HDR
// static info (both optional extensions).
status_t ACodec::setupVideoEncoder(
        const char *mime, const sp<AMessage> &msg,
        sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    int32_t tmp;
    if (!msg->findInt32("color-format", &tmp)) {
        return INVALID_OPERATION;
    }

    OMX_COLOR_FORMATTYPE colorFormat =
        static_cast<OMX_COLOR_FORMATTYPE>(tmp);

    status_t err = setVideoPortFormatType(
            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);

    if (err != OK) {
        ALOGE("[%s] does not support color format %d",
              mComponentName.c_str(), colorFormat);

        return err;
    }

    /* Input port configuration */

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    int32_t width, height, bitrate;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)
            || !msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    int32_t stride;
    if (!msg->findInt32("stride", &stride)) {
        stride = width;
    }

    video_def->nStride = stride;

    int32_t sliceHeight;
    if (!msg->findInt32("slice-height", &sliceHeight)) {
        sliceHeight = height;
    }

    video_def->nSliceHeight = sliceHeight;

    // 4:2:0 layout: luma plane plus half-size chroma
    def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
        // NOTE(review): mTimePerFrameUs is only updated on the int32 path;
        // a float "frame-rate" leaves it unchanged -- confirm intended.
        mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
    }

    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);  // Q16 fixed point
    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
    // this is redundant as it was already set up in setVideoPortFormatType
    // FIXME for now skip this only for flexible YUV formats
    if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
        video_def->eColorFormat = colorFormat;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set input port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    /* Output port configuration */

    OMX_VIDEO_CODINGTYPE compressionFormat;
    err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        ALOGE("[%s] does not support compression format %d",
             mComponentName.c_str(), compressionFormat);

        return err;
    }

    def.nPortIndex = kPortIndexOutput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;
    video_def->xFramerate = 0;
    video_def->nBitrate = bitrate;
    video_def->eCompressionFormat = compressionFormat;
    video_def->eColorFormat = OMX_COLOR_FormatUnused;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set output port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    int32_t intraRefreshPeriod = 0;
    if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod >= 0) {
        err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    // per-standard encoder parameters
    switch (compressionFormat) {
        case OMX_VIDEO_CodingMPEG4:
            err = setupMPEG4EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingH263:
            err = setupH263EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingAVC:
            err = setupAVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingHEVC:
            err = setupHEVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            err = setupVPXEncoderParameters(msg);
            break;

        default:
            break;
    }

    // Set up color aspects on input, but propagate them to the output format, as they will
    // not be read back from encoder.
    err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
    if (err == ERROR_UNSUPPORTED) {
        ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err == OK) {
        ALOGI("setupVideoEncoder succeeded");
    }

    return err;
}

// Configures cyclic and/or adaptive intra-refresh on the output port from the
// "intra-refresh-*-mbs"/"-ref" message keys; |mode| selects the refresh type.
status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nCirMBs = mbs;
    }

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nAirMBs = mbs;

        int32_t ref;
        if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
            return INVALID_OPERATION;
        }
        params.nAirRef = ref;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoIntraRefresh,
            &params, sizeof(params));
    return err;
}

// Converts an I-frame interval (seconds) into a P-frame count:
// negative -> "infinite" (0xFFFFFFFF), zero -> all-I (0).
// NOTE(review): frameRate * iFramesInterval is a signed int32 product and can
// overflow for large inputs before the OMX_U32 conversion -- confirm inputs
// are bounded upstream.
static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) {
    if (iFramesInterval < 0) {
        return 0xFFFFFFFF;
    } else if (iFramesInterval == 0) {
        return 0;
    }
    OMX_U32 ret = frameRate * iFramesInterval;
    return ret;
}

// Reads "bitrate-mode" from the message; defaults to variable bitrate.
static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
    int32_t tmp;
    if (!msg->findInt32("bitrate-mode", &tmp)) {
        return OMX_Video_ControlRateVariable;
    }

    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
}

// MPEG-4 encoder specifics: GOP structure from i-frame-interval/frame-rate,
// optional profile/level, then bitrate and error-correction setup.
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    if (mpeg4type.nPFrames == 0) {
        // I-frame-only stream
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nBFrames = 0;
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// H.263 encoder specifics; mirrors the MPEG-4 path with H.263 parameters.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    if (h263type.nPFrames == 0) {
        // I-frame-only stream
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    h263type.nBFrames = 0;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// static
// Picks the minimum AVC level whose MB-rate / MB-count / dimension / bitrate
// limits cover the given stream; bitrate is first normalized to a
// baseline-profile-equivalent kbps. Returns 0 if no level fits.
int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
        int width, int height, int rate, int bitrate,
        OMX_VIDEO_AVCPROFILETYPE profile) {
    // convert bitrate to main/baseline profile kbps equivalent
    switch (profile) {
        case OMX_VIDEO_AVCProfileHigh10:
            bitrate = divUp(bitrate, 3000); break;
        case OMX_VIDEO_AVCProfileHigh:
            bitrate = divUp(bitrate, 1250); break;
        default:
            bitrate = divUp(bitrate, 1000); break;
    }

    // convert size and rate to MBs
    width = divUp(width, 16);
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;
    int maxDimension = max(width, height);

    static const int limits[][5] = {
        /*   MBps     MB   dim  bitrate        level */
        {    1485,    99,  28,     64, OMX_VIDEO_AVCLevel1  },
        {    1485,    99,  28,    128, OMX_VIDEO_AVCLevel1b },
        {    3000,   396,  56,    192, OMX_VIDEO_AVCLevel11 },
        {    6000,   396,  56,    384, OMX_VIDEO_AVCLevel12 },
        {   11880,   396,  56,    768, OMX_VIDEO_AVCLevel13 },
        {   11880,   396,  56,   2000, OMX_VIDEO_AVCLevel2  },
        {   19800,   792,  79,   4000, OMX_VIDEO_AVCLevel21 },
        {   20250,  1620, 113,   4000, OMX_VIDEO_AVCLevel22 },
        {   40500,  1620, 113,  10000, OMX_VIDEO_AVCLevel3  },
        {  108000,  3600, 169,  14000, OMX_VIDEO_AVCLevel31 },
        {  216000,  5120, 202,  20000, OMX_VIDEO_AVCLevel32 },
        {  245760,  8192, 256,  20000, OMX_VIDEO_AVCLevel4  },
        {  245760,  8192, 256,  50000, OMX_VIDEO_AVCLevel41 },
        {  522240,  8704, 263,  50000, OMX_VIDEO_AVCLevel42 },
        {  589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5  },
        {  983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 },
        { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 },
    };

    for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
3996 const int (&limit)[5] = limits[i]; 3997 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 3998 && bitrate <= limit[3]) { 3999 return limit[4]; 4000 } 4001 } 4002 return 0; 4003} 4004 4005status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 4006 int32_t bitrate, iFrameInterval; 4007 if (!msg->findInt32("bitrate", &bitrate) 4008 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4009 return INVALID_OPERATION; 4010 } 4011 4012 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4013 4014 float frameRate; 4015 if (!msg->findFloat("frame-rate", &frameRate)) { 4016 int32_t tmp; 4017 if (!msg->findInt32("frame-rate", &tmp)) { 4018 return INVALID_OPERATION; 4019 } 4020 frameRate = (float)tmp; 4021 } 4022 4023 status_t err = OK; 4024 int32_t intraRefreshMode = 0; 4025 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 4026 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 4027 if (err != OK) { 4028 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4029 err, intraRefreshMode); 4030 return err; 4031 } 4032 } 4033 4034 OMX_VIDEO_PARAM_AVCTYPE h264type; 4035 InitOMXParams(&h264type); 4036 h264type.nPortIndex = kPortIndexOutput; 4037 4038 err = mOMX->getParameter( 4039 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4040 4041 if (err != OK) { 4042 return err; 4043 } 4044 4045 h264type.nAllowedPictureTypes = 4046 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4047 4048 int32_t profile; 4049 if (msg->findInt32("profile", &profile)) { 4050 int32_t level; 4051 if (!msg->findInt32("level", &level)) { 4052 return INVALID_OPERATION; 4053 } 4054 4055 err = verifySupportForProfileAndLevel(profile, level); 4056 4057 if (err != OK) { 4058 return err; 4059 } 4060 4061 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4062 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4063 } else { 4064 // Use baseline profile for AVC recording if profile is not 
specified. 4065 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4066 } 4067 4068 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4069 asString(h264type.eProfile), asString(h264type.eLevel)); 4070 4071 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4072 h264type.nSliceHeaderSpacing = 0; 4073 h264type.bUseHadamard = OMX_TRUE; 4074 h264type.nRefFrames = 1; 4075 h264type.nBFrames = 0; 4076 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4077 if (h264type.nPFrames == 0) { 4078 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4079 } 4080 h264type.nRefIdx10ActiveMinus1 = 0; 4081 h264type.nRefIdx11ActiveMinus1 = 0; 4082 h264type.bEntropyCodingCABAC = OMX_FALSE; 4083 h264type.bWeightedPPrediction = OMX_FALSE; 4084 h264type.bconstIpred = OMX_FALSE; 4085 h264type.bDirect8x8Inference = OMX_FALSE; 4086 h264type.bDirectSpatialTemporal = OMX_FALSE; 4087 h264type.nCabacInitIdc = 0; 4088 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4089 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4090 h264type.nSliceHeaderSpacing = 0; 4091 h264type.bUseHadamard = OMX_TRUE; 4092 h264type.nRefFrames = 2; 4093 h264type.nBFrames = 1; 4094 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4095 h264type.nAllowedPictureTypes = 4096 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4097 h264type.nRefIdx10ActiveMinus1 = 0; 4098 h264type.nRefIdx11ActiveMinus1 = 0; 4099 h264type.bEntropyCodingCABAC = OMX_TRUE; 4100 h264type.bWeightedPPrediction = OMX_TRUE; 4101 h264type.bconstIpred = OMX_TRUE; 4102 h264type.bDirect8x8Inference = OMX_TRUE; 4103 h264type.bDirectSpatialTemporal = OMX_TRUE; 4104 h264type.nCabacInitIdc = 1; 4105 } 4106 4107 if (h264type.nBFrames != 0) { 4108 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4109 } 4110 4111 h264type.bEnableUEP = OMX_FALSE; 4112 h264type.bEnableFMO = OMX_FALSE; 4113 h264type.bEnableASO = OMX_FALSE; 4114 h264type.bEnableRS = OMX_FALSE; 4115 
h264type.bFrameMBsOnly = OMX_TRUE; 4116 h264type.bMBAFF = OMX_FALSE; 4117 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4118 4119 err = mOMX->setParameter( 4120 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4121 4122 if (err != OK) { 4123 return err; 4124 } 4125 4126 return configureBitrate(bitrate, bitrateMode); 4127} 4128 4129status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4130 int32_t bitrate, iFrameInterval; 4131 if (!msg->findInt32("bitrate", &bitrate) 4132 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4133 return INVALID_OPERATION; 4134 } 4135 4136 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4137 4138 float frameRate; 4139 if (!msg->findFloat("frame-rate", &frameRate)) { 4140 int32_t tmp; 4141 if (!msg->findInt32("frame-rate", &tmp)) { 4142 return INVALID_OPERATION; 4143 } 4144 frameRate = (float)tmp; 4145 } 4146 4147 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4148 InitOMXParams(&hevcType); 4149 hevcType.nPortIndex = kPortIndexOutput; 4150 4151 status_t err = OK; 4152 err = mOMX->getParameter( 4153 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4154 if (err != OK) { 4155 return err; 4156 } 4157 4158 int32_t profile; 4159 if (msg->findInt32("profile", &profile)) { 4160 int32_t level; 4161 if (!msg->findInt32("level", &level)) { 4162 return INVALID_OPERATION; 4163 } 4164 4165 err = verifySupportForProfileAndLevel(profile, level); 4166 if (err != OK) { 4167 return err; 4168 } 4169 4170 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4171 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4172 } 4173 // TODO: finer control? 
4174 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4175 4176 err = mOMX->setParameter( 4177 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4178 if (err != OK) { 4179 return err; 4180 } 4181 4182 return configureBitrate(bitrate, bitrateMode); 4183} 4184 4185status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 4186 int32_t bitrate; 4187 int32_t iFrameInterval = 0; 4188 size_t tsLayers = 0; 4189 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4190 OMX_VIDEO_VPXTemporalLayerPatternNone; 4191 static const uint32_t kVp8LayerRateAlloction 4192 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4193 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4194 {100, 100, 100}, // 1 layer 4195 { 60, 100, 100}, // 2 layers {60%, 40%} 4196 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4197 }; 4198 if (!msg->findInt32("bitrate", &bitrate)) { 4199 return INVALID_OPERATION; 4200 } 4201 msg->findInt32("i-frame-interval", &iFrameInterval); 4202 4203 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4204 4205 float frameRate; 4206 if (!msg->findFloat("frame-rate", &frameRate)) { 4207 int32_t tmp; 4208 if (!msg->findInt32("frame-rate", &tmp)) { 4209 return INVALID_OPERATION; 4210 } 4211 frameRate = (float)tmp; 4212 } 4213 4214 AString tsSchema; 4215 if (msg->findString("ts-schema", &tsSchema)) { 4216 if (tsSchema == "webrtc.vp8.1-layer") { 4217 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4218 tsLayers = 1; 4219 } else if (tsSchema == "webrtc.vp8.2-layer") { 4220 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4221 tsLayers = 2; 4222 } else if (tsSchema == "webrtc.vp8.3-layer") { 4223 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4224 tsLayers = 3; 4225 } else { 4226 ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str()); 4227 } 4228 } 4229 4230 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4231 InitOMXParams(&vp8type); 4232 vp8type.nPortIndex = kPortIndexOutput; 4233 status_t err = 
mOMX->getParameter( 4234 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4235 &vp8type, sizeof(vp8type)); 4236 4237 if (err == OK) { 4238 if (iFrameInterval > 0) { 4239 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4240 } 4241 vp8type.eTemporalPattern = pattern; 4242 vp8type.nTemporalLayerCount = tsLayers; 4243 if (tsLayers > 0) { 4244 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4245 vp8type.nTemporalLayerBitrateRatio[i] = 4246 kVp8LayerRateAlloction[tsLayers - 1][i]; 4247 } 4248 } 4249 if (bitrateMode == OMX_Video_ControlRateConstant) { 4250 vp8type.nMinQuantizer = 2; 4251 vp8type.nMaxQuantizer = 63; 4252 } 4253 4254 err = mOMX->setParameter( 4255 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4256 &vp8type, sizeof(vp8type)); 4257 if (err != OK) { 4258 ALOGW("Extended VP8 parameters set failed: %d", err); 4259 } 4260 } 4261 4262 return configureBitrate(bitrate, bitrateMode); 4263} 4264 4265status_t ACodec::verifySupportForProfileAndLevel( 4266 int32_t profile, int32_t level) { 4267 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4268 InitOMXParams(¶ms); 4269 params.nPortIndex = kPortIndexOutput; 4270 4271 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4272 params.nProfileIndex = index; 4273 status_t err = mOMX->getParameter( 4274 mNode, 4275 OMX_IndexParamVideoProfileLevelQuerySupported, 4276 ¶ms, 4277 sizeof(params)); 4278 4279 if (err != OK) { 4280 return err; 4281 } 4282 4283 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4284 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4285 4286 if (profile == supportedProfile && level <= supportedLevel) { 4287 return OK; 4288 } 4289 4290 if (index == kMaxIndicesToCheck) { 4291 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4292 mComponentName.c_str(), index, 4293 params.eProfile, params.eLevel); 4294 } 4295 } 4296 return ERROR_UNSUPPORTED; 4297} 4298 4299status_t ACodec::configureBitrate( 
4300 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 4301 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 4302 InitOMXParams(&bitrateType); 4303 bitrateType.nPortIndex = kPortIndexOutput; 4304 4305 status_t err = mOMX->getParameter( 4306 mNode, OMX_IndexParamVideoBitrate, 4307 &bitrateType, sizeof(bitrateType)); 4308 4309 if (err != OK) { 4310 return err; 4311 } 4312 4313 bitrateType.eControlRate = bitrateMode; 4314 bitrateType.nTargetBitrate = bitrate; 4315 4316 return mOMX->setParameter( 4317 mNode, OMX_IndexParamVideoBitrate, 4318 &bitrateType, sizeof(bitrateType)); 4319} 4320 4321status_t ACodec::setupErrorCorrectionParameters() { 4322 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 4323 InitOMXParams(&errorCorrectionType); 4324 errorCorrectionType.nPortIndex = kPortIndexOutput; 4325 4326 status_t err = mOMX->getParameter( 4327 mNode, OMX_IndexParamVideoErrorCorrection, 4328 &errorCorrectionType, sizeof(errorCorrectionType)); 4329 4330 if (err != OK) { 4331 return OK; // Optional feature. 
Ignore this failure 4332 } 4333 4334 errorCorrectionType.bEnableHEC = OMX_FALSE; 4335 errorCorrectionType.bEnableResync = OMX_TRUE; 4336 errorCorrectionType.nResynchMarkerSpacing = 256; 4337 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 4338 errorCorrectionType.bEnableRVLC = OMX_FALSE; 4339 4340 return mOMX->setParameter( 4341 mNode, OMX_IndexParamVideoErrorCorrection, 4342 &errorCorrectionType, sizeof(errorCorrectionType)); 4343} 4344 4345status_t ACodec::setVideoFormatOnPort( 4346 OMX_U32 portIndex, 4347 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 4348 float frameRate) { 4349 OMX_PARAM_PORTDEFINITIONTYPE def; 4350 InitOMXParams(&def); 4351 def.nPortIndex = portIndex; 4352 4353 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 4354 4355 status_t err = mOMX->getParameter( 4356 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4357 if (err != OK) { 4358 return err; 4359 } 4360 4361 if (portIndex == kPortIndexInput) { 4362 // XXX Need a (much) better heuristic to compute input buffer sizes. 
4363 const size_t X = 64 * 1024; 4364 if (def.nBufferSize < X) { 4365 def.nBufferSize = X; 4366 } 4367 } 4368 4369 if (def.eDomain != OMX_PortDomainVideo) { 4370 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); 4371 return FAILED_TRANSACTION; 4372 } 4373 4374 video_def->nFrameWidth = width; 4375 video_def->nFrameHeight = height; 4376 4377 if (portIndex == kPortIndexInput) { 4378 video_def->eCompressionFormat = compressionFormat; 4379 video_def->eColorFormat = OMX_COLOR_FormatUnused; 4380 if (frameRate >= 0) { 4381 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 4382 } 4383 } 4384 4385 err = mOMX->setParameter( 4386 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4387 4388 return err; 4389} 4390 4391status_t ACodec::initNativeWindow() { 4392 if (mNativeWindow != NULL) { 4393 return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE); 4394 } 4395 4396 mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 4397 return OK; 4398} 4399 4400size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 4401 size_t n = 0; 4402 4403 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4404 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 4405 4406 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4407 ++n; 4408 } 4409 } 4410 4411 return n; 4412} 4413 4414size_t ACodec::countBuffersOwnedByNativeWindow() const { 4415 size_t n = 0; 4416 4417 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 4418 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 4419 4420 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4421 ++n; 4422 } 4423 } 4424 4425 return n; 4426} 4427 4428void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 4429 if (mNativeWindow == NULL) { 4430 return; 4431 } 4432 4433 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 4434 && dequeueBufferFromNativeWindow() != NULL) { 
4435 // these buffers will be submitted as regular buffers; account for this 4436 if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) { 4437 --mMetadataBuffersToSubmit; 4438 } 4439 } 4440} 4441 4442bool ACodec::allYourBuffersAreBelongToUs( 4443 OMX_U32 portIndex) { 4444 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4445 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 4446 4447 if (info->mStatus != BufferInfo::OWNED_BY_US 4448 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4449 ALOGV("[%s] Buffer %u on port %u still has status %d", 4450 mComponentName.c_str(), 4451 info->mBufferID, portIndex, info->mStatus); 4452 return false; 4453 } 4454 } 4455 4456 return true; 4457} 4458 4459bool ACodec::allYourBuffersAreBelongToUs() { 4460 return allYourBuffersAreBelongToUs(kPortIndexInput) 4461 && allYourBuffersAreBelongToUs(kPortIndexOutput); 4462} 4463 4464void ACodec::deferMessage(const sp<AMessage> &msg) { 4465 mDeferredQueue.push_back(msg); 4466} 4467 4468void ACodec::processDeferredMessages() { 4469 List<sp<AMessage> > queue = mDeferredQueue; 4470 mDeferredQueue.clear(); 4471 4472 List<sp<AMessage> >::iterator it = queue.begin(); 4473 while (it != queue.end()) { 4474 onMessageReceived(*it++); 4475 } 4476} 4477 4478// static 4479bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params ¶ms) { 4480 MediaImage2 &image = params.sMediaImage; 4481 memset(&image, 0, sizeof(image)); 4482 4483 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4484 image.mNumPlanes = 0; 4485 4486 const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat; 4487 image.mWidth = params.nFrameWidth; 4488 image.mHeight = params.nFrameHeight; 4489 4490 // only supporting YUV420 4491 if (fmt != OMX_COLOR_FormatYUV420Planar && 4492 fmt != OMX_COLOR_FormatYUV420PackedPlanar && 4493 fmt != OMX_COLOR_FormatYUV420SemiPlanar && 4494 fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar && 4495 fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) { 4496 ALOGW("do not 
know color format 0x%x = %d", fmt, fmt); 4497 return false; 4498 } 4499 4500 // TEMPORARY FIX for some vendors that advertise sliceHeight as 0 4501 if (params.nStride != 0 && params.nSliceHeight == 0) { 4502 ALOGW("using sliceHeight=%u instead of what codec advertised (=0)", 4503 params.nFrameHeight); 4504 params.nSliceHeight = params.nFrameHeight; 4505 } 4506 4507 // we need stride and slice-height to be non-zero and sensible. These values were chosen to 4508 // prevent integer overflows further down the line, and do not indicate support for 4509 // 32kx32k video. 4510 if (params.nStride == 0 || params.nSliceHeight == 0 4511 || params.nStride > 32768 || params.nSliceHeight > 32768) { 4512 ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u", 4513 fmt, fmt, params.nStride, params.nSliceHeight); 4514 return false; 4515 } 4516 4517 // set-up YUV format 4518 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV; 4519 image.mNumPlanes = 3; 4520 image.mBitDepth = 8; 4521 image.mBitDepthAllocated = 8; 4522 image.mPlane[image.Y].mOffset = 0; 4523 image.mPlane[image.Y].mColInc = 1; 4524 image.mPlane[image.Y].mRowInc = params.nStride; 4525 image.mPlane[image.Y].mHorizSubsampling = 1; 4526 image.mPlane[image.Y].mVertSubsampling = 1; 4527 4528 switch ((int)fmt) { 4529 case HAL_PIXEL_FORMAT_YV12: 4530 if (params.bUsingNativeBuffers) { 4531 size_t ystride = align(params.nStride, 16); 4532 size_t cstride = align(params.nStride / 2, 16); 4533 image.mPlane[image.Y].mRowInc = ystride; 4534 4535 image.mPlane[image.V].mOffset = ystride * params.nSliceHeight; 4536 image.mPlane[image.V].mColInc = 1; 4537 image.mPlane[image.V].mRowInc = cstride; 4538 image.mPlane[image.V].mHorizSubsampling = 2; 4539 image.mPlane[image.V].mVertSubsampling = 2; 4540 4541 image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset 4542 + (cstride * params.nSliceHeight / 2); 4543 image.mPlane[image.U].mColInc = 1; 4544 image.mPlane[image.U].mRowInc = cstride; 4545 
image.mPlane[image.U].mHorizSubsampling = 2; 4546 image.mPlane[image.U].mVertSubsampling = 2; 4547 break; 4548 } else { 4549 // fall through as YV12 is used for YUV420Planar by some codecs 4550 } 4551 4552 case OMX_COLOR_FormatYUV420Planar: 4553 case OMX_COLOR_FormatYUV420PackedPlanar: 4554 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4555 image.mPlane[image.U].mColInc = 1; 4556 image.mPlane[image.U].mRowInc = params.nStride / 2; 4557 image.mPlane[image.U].mHorizSubsampling = 2; 4558 image.mPlane[image.U].mVertSubsampling = 2; 4559 4560 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset 4561 + (params.nStride * params.nSliceHeight / 4); 4562 image.mPlane[image.V].mColInc = 1; 4563 image.mPlane[image.V].mRowInc = params.nStride / 2; 4564 image.mPlane[image.V].mHorizSubsampling = 2; 4565 image.mPlane[image.V].mVertSubsampling = 2; 4566 break; 4567 4568 case OMX_COLOR_FormatYUV420SemiPlanar: 4569 // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder 4570 case OMX_COLOR_FormatYUV420PackedSemiPlanar: 4571 // NV12 4572 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4573 image.mPlane[image.U].mColInc = 2; 4574 image.mPlane[image.U].mRowInc = params.nStride; 4575 image.mPlane[image.U].mHorizSubsampling = 2; 4576 image.mPlane[image.U].mVertSubsampling = 2; 4577 4578 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1; 4579 image.mPlane[image.V].mColInc = 2; 4580 image.mPlane[image.V].mRowInc = params.nStride; 4581 image.mPlane[image.V].mHorizSubsampling = 2; 4582 image.mPlane[image.V].mVertSubsampling = 2; 4583 break; 4584 4585 default: 4586 TRESPASS(); 4587 } 4588 return true; 4589} 4590 4591// static 4592bool ACodec::describeColorFormat( 4593 const sp<IOMX> &omx, IOMX::node_id node, 4594 DescribeColorFormat2Params &describeParams) 4595{ 4596 OMX_INDEXTYPE describeColorFormatIndex; 4597 if (omx->getExtensionIndex( 4598 node, "OMX.google.android.index.describeColorFormat", 4599 
&describeColorFormatIndex) == OK) { 4600 DescribeColorFormatParams describeParamsV1(describeParams); 4601 if (omx->getParameter( 4602 node, describeColorFormatIndex, 4603 &describeParamsV1, sizeof(describeParamsV1)) == OK) { 4604 describeParams.initFromV1(describeParamsV1); 4605 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4606 } 4607 } else if (omx->getExtensionIndex( 4608 node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK 4609 && omx->getParameter( 4610 node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) { 4611 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4612 } 4613 4614 return describeDefaultColorFormat(describeParams); 4615} 4616 4617// static 4618bool ACodec::isFlexibleColorFormat( 4619 const sp<IOMX> &omx, IOMX::node_id node, 4620 uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) { 4621 DescribeColorFormat2Params describeParams; 4622 InitOMXParams(&describeParams); 4623 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; 4624 // reasonable dummy values 4625 describeParams.nFrameWidth = 128; 4626 describeParams.nFrameHeight = 128; 4627 describeParams.nStride = 128; 4628 describeParams.nSliceHeight = 128; 4629 describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers; 4630 4631 CHECK(flexibleEquivalent != NULL); 4632 4633 if (!describeColorFormat(omx, node, describeParams)) { 4634 return false; 4635 } 4636 4637 const MediaImage2 &img = describeParams.sMediaImage; 4638 if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) { 4639 if (img.mNumPlanes != 3 4640 || img.mPlane[img.Y].mHorizSubsampling != 1 4641 || img.mPlane[img.Y].mVertSubsampling != 1) { 4642 return false; 4643 } 4644 4645 // YUV 420 4646 if (img.mPlane[img.U].mHorizSubsampling == 2 4647 && img.mPlane[img.U].mVertSubsampling == 2 4648 && img.mPlane[img.V].mHorizSubsampling == 2 4649 && 
img.mPlane[img.V].mVertSubsampling == 2) { 4650 // possible flexible YUV420 format 4651 if (img.mBitDepth <= 8) { 4652 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible; 4653 return true; 4654 } 4655 } 4656 } 4657 return false; 4658} 4659 4660status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4661 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4662 OMX_PARAM_PORTDEFINITIONTYPE def; 4663 InitOMXParams(&def); 4664 def.nPortIndex = portIndex; 4665 4666 status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4667 if (err != OK) { 4668 return err; 4669 } 4670 4671 if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) { 4672 ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex); 4673 return BAD_VALUE; 4674 } 4675 4676 switch (def.eDomain) { 4677 case OMX_PortDomainVideo: 4678 { 4679 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 4680 switch ((int)videoDef->eCompressionFormat) { 4681 case OMX_VIDEO_CodingUnused: 4682 { 4683 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 4684 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 4685 4686 notify->setInt32("stride", videoDef->nStride); 4687 notify->setInt32("slice-height", videoDef->nSliceHeight); 4688 notify->setInt32("color-format", videoDef->eColorFormat); 4689 4690 if (mNativeWindow == NULL) { 4691 DescribeColorFormat2Params describeParams; 4692 InitOMXParams(&describeParams); 4693 describeParams.eColorFormat = videoDef->eColorFormat; 4694 describeParams.nFrameWidth = videoDef->nFrameWidth; 4695 describeParams.nFrameHeight = videoDef->nFrameHeight; 4696 describeParams.nStride = videoDef->nStride; 4697 describeParams.nSliceHeight = videoDef->nSliceHeight; 4698 describeParams.bUsingNativeBuffers = OMX_FALSE; 4699 4700 if (describeColorFormat(mOMX, mNode, describeParams)) { 4701 notify->setBuffer( 4702 "image-data", 4703 ABuffer::CreateAsCopy( 4704 
&describeParams.sMediaImage, 4705 sizeof(describeParams.sMediaImage))); 4706 4707 MediaImage2 &img = describeParams.sMediaImage; 4708 MediaImage2::PlaneInfo *plane = img.mPlane; 4709 ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }", 4710 mComponentName.c_str(), img.mWidth, img.mHeight, 4711 plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc, 4712 plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc, 4713 plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc); 4714 } 4715 } 4716 4717 int32_t width = (int32_t)videoDef->nFrameWidth; 4718 int32_t height = (int32_t)videoDef->nFrameHeight; 4719 4720 if (portIndex == kPortIndexOutput) { 4721 OMX_CONFIG_RECTTYPE rect; 4722 InitOMXParams(&rect); 4723 rect.nPortIndex = portIndex; 4724 4725 if (mOMX->getConfig( 4726 mNode, 4727 (portIndex == kPortIndexOutput ? 4728 OMX_IndexConfigCommonOutputCrop : 4729 OMX_IndexConfigCommonInputCrop), 4730 &rect, sizeof(rect)) != OK) { 4731 rect.nLeft = 0; 4732 rect.nTop = 0; 4733 rect.nWidth = videoDef->nFrameWidth; 4734 rect.nHeight = videoDef->nFrameHeight; 4735 } 4736 4737 if (rect.nLeft < 0 || 4738 rect.nTop < 0 || 4739 rect.nLeft + rect.nWidth > videoDef->nFrameWidth || 4740 rect.nTop + rect.nHeight > videoDef->nFrameHeight) { 4741 ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)", 4742 rect.nLeft, rect.nTop, 4743 rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight, 4744 videoDef->nFrameWidth, videoDef->nFrameHeight); 4745 return BAD_VALUE; 4746 } 4747 4748 notify->setRect( 4749 "crop", 4750 rect.nLeft, 4751 rect.nTop, 4752 rect.nLeft + rect.nWidth - 1, 4753 rect.nTop + rect.nHeight - 1); 4754 4755 width = rect.nWidth; 4756 height = rect.nHeight; 4757 4758 android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN; 4759 (void)getColorAspectsAndDataSpaceForVideoDecoder( 4760 width, height, mConfigFormat, notify, 4761 mUsingNativeWindow ? 
&dataSpace : NULL); 4762 if (mUsingNativeWindow) { 4763 notify->setInt32("android._dataspace", dataSpace); 4764 } 4765 (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify); 4766 } else { 4767 (void)getInputColorAspectsForVideoEncoder(notify); 4768 if (mConfigFormat->contains("hdr-static-info")) { 4769 (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify); 4770 } 4771 } 4772 4773 break; 4774 } 4775 4776 case OMX_VIDEO_CodingVP8: 4777 case OMX_VIDEO_CodingVP9: 4778 { 4779 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4780 InitOMXParams(&vp8type); 4781 vp8type.nPortIndex = kPortIndexOutput; 4782 status_t err = mOMX->getParameter( 4783 mNode, 4784 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4785 &vp8type, 4786 sizeof(vp8type)); 4787 4788 if (err == OK) { 4789 AString tsSchema = "none"; 4790 if (vp8type.eTemporalPattern 4791 == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { 4792 switch (vp8type.nTemporalLayerCount) { 4793 case 1: 4794 { 4795 tsSchema = "webrtc.vp8.1-layer"; 4796 break; 4797 } 4798 case 2: 4799 { 4800 tsSchema = "webrtc.vp8.2-layer"; 4801 break; 4802 } 4803 case 3: 4804 { 4805 tsSchema = "webrtc.vp8.3-layer"; 4806 break; 4807 } 4808 default: 4809 { 4810 break; 4811 } 4812 } 4813 } 4814 notify->setString("ts-schema", tsSchema); 4815 } 4816 // Fall through to set up mime. 
4817 } 4818 4819 default: 4820 { 4821 if (mIsEncoder ^ (portIndex == kPortIndexOutput)) { 4822 // should be CodingUnused 4823 ALOGE("Raw port video compression format is %s(%d)", 4824 asString(videoDef->eCompressionFormat), 4825 videoDef->eCompressionFormat); 4826 return BAD_VALUE; 4827 } 4828 AString mime; 4829 if (GetMimeTypeForVideoCoding( 4830 videoDef->eCompressionFormat, &mime) != OK) { 4831 notify->setString("mime", "application/octet-stream"); 4832 } else { 4833 notify->setString("mime", mime.c_str()); 4834 } 4835 uint32_t intraRefreshPeriod = 0; 4836 if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK 4837 && intraRefreshPeriod > 0) { 4838 notify->setInt32("intra-refresh-period", intraRefreshPeriod); 4839 } 4840 break; 4841 } 4842 } 4843 notify->setInt32("width", videoDef->nFrameWidth); 4844 notify->setInt32("height", videoDef->nFrameHeight); 4845 ALOGV("[%s] %s format is %s", mComponentName.c_str(), 4846 portIndex == kPortIndexInput ? "input" : "output", 4847 notify->debugString().c_str()); 4848 4849 break; 4850 } 4851 4852 case OMX_PortDomainAudio: 4853 { 4854 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 4855 4856 switch ((int)audioDef->eEncoding) { 4857 case OMX_AUDIO_CodingPCM: 4858 { 4859 OMX_AUDIO_PARAM_PCMMODETYPE params; 4860 InitOMXParams(¶ms); 4861 params.nPortIndex = portIndex; 4862 4863 err = mOMX->getParameter( 4864 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4865 if (err != OK) { 4866 return err; 4867 } 4868 4869 if (params.nChannels <= 0 4870 || (params.nChannels != 1 && !params.bInterleaved) 4871 || params.ePCMMode != OMX_AUDIO_PCMModeLinear) { 4872 ALOGE("unsupported PCM port: %u channels%s, %u-bit", 4873 params.nChannels, 4874 params.bInterleaved ? 
" interleaved" : "", 4875 params.nBitPerSample); 4876 return FAILED_TRANSACTION; 4877 } 4878 4879 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 4880 notify->setInt32("channel-count", params.nChannels); 4881 notify->setInt32("sample-rate", params.nSamplingRate); 4882 4883 AudioEncoding encoding = kAudioEncodingPcm16bit; 4884 if (params.eNumData == OMX_NumericalDataUnsigned 4885 && params.nBitPerSample == 8u) { 4886 encoding = kAudioEncodingPcm8bit; 4887 } else if (params.eNumData == OMX_NumericalDataFloat 4888 && params.nBitPerSample == 32u) { 4889 encoding = kAudioEncodingPcmFloat; 4890 } else if (params.nBitPerSample != 16u 4891 || params.eNumData != OMX_NumericalDataSigned) { 4892 ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ", 4893 asString(params.eNumData), params.eNumData, 4894 asString(params.ePCMMode), params.ePCMMode); 4895 return FAILED_TRANSACTION; 4896 } 4897 notify->setInt32("pcm-encoding", encoding); 4898 4899 if (mChannelMaskPresent) { 4900 notify->setInt32("channel-mask", mChannelMask); 4901 } 4902 break; 4903 } 4904 4905 case OMX_AUDIO_CodingAAC: 4906 { 4907 OMX_AUDIO_PARAM_AACPROFILETYPE params; 4908 InitOMXParams(¶ms); 4909 params.nPortIndex = portIndex; 4910 4911 err = mOMX->getParameter( 4912 mNode, OMX_IndexParamAudioAac, ¶ms, sizeof(params)); 4913 if (err != OK) { 4914 return err; 4915 } 4916 4917 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 4918 notify->setInt32("channel-count", params.nChannels); 4919 notify->setInt32("sample-rate", params.nSampleRate); 4920 break; 4921 } 4922 4923 case OMX_AUDIO_CodingAMR: 4924 { 4925 OMX_AUDIO_PARAM_AMRTYPE params; 4926 InitOMXParams(¶ms); 4927 params.nPortIndex = portIndex; 4928 4929 err = mOMX->getParameter( 4930 mNode, OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); 4931 if (err != OK) { 4932 return err; 4933 } 4934 4935 notify->setInt32("channel-count", 1); 4936 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 4937 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 4938 
notify->setInt32("sample-rate", 16000); 4939 } else { 4940 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 4941 notify->setInt32("sample-rate", 8000); 4942 } 4943 break; 4944 } 4945 4946 case OMX_AUDIO_CodingFLAC: 4947 { 4948 OMX_AUDIO_PARAM_FLACTYPE params; 4949 InitOMXParams(¶ms); 4950 params.nPortIndex = portIndex; 4951 4952 err = mOMX->getParameter( 4953 mNode, OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); 4954 if (err != OK) { 4955 return err; 4956 } 4957 4958 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 4959 notify->setInt32("channel-count", params.nChannels); 4960 notify->setInt32("sample-rate", params.nSampleRate); 4961 break; 4962 } 4963 4964 case OMX_AUDIO_CodingMP3: 4965 { 4966 OMX_AUDIO_PARAM_MP3TYPE params; 4967 InitOMXParams(¶ms); 4968 params.nPortIndex = portIndex; 4969 4970 err = mOMX->getParameter( 4971 mNode, OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); 4972 if (err != OK) { 4973 return err; 4974 } 4975 4976 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 4977 notify->setInt32("channel-count", params.nChannels); 4978 notify->setInt32("sample-rate", params.nSampleRate); 4979 break; 4980 } 4981 4982 case OMX_AUDIO_CodingVORBIS: 4983 { 4984 OMX_AUDIO_PARAM_VORBISTYPE params; 4985 InitOMXParams(¶ms); 4986 params.nPortIndex = portIndex; 4987 4988 err = mOMX->getParameter( 4989 mNode, OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); 4990 if (err != OK) { 4991 return err; 4992 } 4993 4994 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 4995 notify->setInt32("channel-count", params.nChannels); 4996 notify->setInt32("sample-rate", params.nSampleRate); 4997 break; 4998 } 4999 5000 case OMX_AUDIO_CodingAndroidAC3: 5001 { 5002 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 5003 InitOMXParams(¶ms); 5004 params.nPortIndex = portIndex; 5005 5006 err = mOMX->getParameter( 5007 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 5008 ¶ms, sizeof(params)); 5009 if (err != OK) { 5010 return err; 5011 } 5012 5013 notify->setString("mime", 
MEDIA_MIMETYPE_AUDIO_AC3); 5014 notify->setInt32("channel-count", params.nChannels); 5015 notify->setInt32("sample-rate", params.nSampleRate); 5016 break; 5017 } 5018 5019 case OMX_AUDIO_CodingAndroidEAC3: 5020 { 5021 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 5022 InitOMXParams(¶ms); 5023 params.nPortIndex = portIndex; 5024 5025 err = mOMX->getParameter( 5026 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 5027 ¶ms, sizeof(params)); 5028 if (err != OK) { 5029 return err; 5030 } 5031 5032 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 5033 notify->setInt32("channel-count", params.nChannels); 5034 notify->setInt32("sample-rate", params.nSampleRate); 5035 break; 5036 } 5037 5038 case OMX_AUDIO_CodingAndroidOPUS: 5039 { 5040 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 5041 InitOMXParams(¶ms); 5042 params.nPortIndex = portIndex; 5043 5044 err = mOMX->getParameter( 5045 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 5046 ¶ms, sizeof(params)); 5047 if (err != OK) { 5048 return err; 5049 } 5050 5051 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 5052 notify->setInt32("channel-count", params.nChannels); 5053 notify->setInt32("sample-rate", params.nSampleRate); 5054 break; 5055 } 5056 5057 case OMX_AUDIO_CodingG711: 5058 { 5059 OMX_AUDIO_PARAM_PCMMODETYPE params; 5060 InitOMXParams(¶ms); 5061 params.nPortIndex = portIndex; 5062 5063 err = mOMX->getParameter( 5064 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5065 if (err != OK) { 5066 return err; 5067 } 5068 5069 const char *mime = NULL; 5070 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 5071 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 5072 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 5073 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 5074 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 5075 mime = MEDIA_MIMETYPE_AUDIO_RAW; 5076 } 5077 notify->setString("mime", mime); 5078 notify->setInt32("channel-count", params.nChannels); 5079 notify->setInt32("sample-rate", 
params.nSamplingRate); 5080 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5081 break; 5082 } 5083 5084 case OMX_AUDIO_CodingGSMFR: 5085 { 5086 OMX_AUDIO_PARAM_PCMMODETYPE params; 5087 InitOMXParams(¶ms); 5088 params.nPortIndex = portIndex; 5089 5090 err = mOMX->getParameter( 5091 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5092 if (err != OK) { 5093 return err; 5094 } 5095 5096 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5097 notify->setInt32("channel-count", params.nChannels); 5098 notify->setInt32("sample-rate", params.nSamplingRate); 5099 break; 5100 } 5101 5102 default: 5103 ALOGE("Unsupported audio coding: %s(%d)\n", 5104 asString(audioDef->eEncoding), audioDef->eEncoding); 5105 return BAD_TYPE; 5106 } 5107 break; 5108 } 5109 5110 default: 5111 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5112 return BAD_TYPE; 5113 } 5114 5115 return OK; 5116} 5117 5118void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5119 // aspects are normally communicated in ColorAspects 5120 int32_t range, standard, transfer; 5121 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5122 5123 // if some aspects are unspecified, use dataspace fields 5124 if (range != 0) { 5125 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5126 } 5127 if (standard != 0) { 5128 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5129 } 5130 if (transfer != 0) { 5131 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5132 } 5133 5134 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5135 if (range != 0) { 5136 mOutputFormat->setInt32("color-range", range); 5137 } 5138 if (standard != 0) { 5139 mOutputFormat->setInt32("color-standard", standard); 5140 } 5141 if (transfer != 0) { 5142 mOutputFormat->setInt32("color-transfer", transfer); 5143 } 5144 5145 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5146 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5147 dataSpace, 5148 aspects.mRange, asString(aspects.mRange), 5149 aspects.mPrimaries, asString(aspects.mPrimaries), 5150 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5151 aspects.mTransfer, asString(aspects.mTransfer), 5152 range, asString((ColorRange)range), 5153 standard, asString((ColorStandard)standard), 5154 transfer, asString((ColorTransfer)transfer)); 5155} 5156 5157void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5158 // store new output format, at the same time mark that this is no longer the first frame 5159 mOutputFormat = mBaseOutputFormat->dup(); 5160 5161 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5162 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5163 return; 5164 } 5165 5166 if (expectedFormat != NULL) { 5167 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5168 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5169 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5170 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5171 mComponentName.c_str(), 5172 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5173 } 5174 } 5175 5176 if (!mIsVideo && !mIsEncoder) { 5177 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5178 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5179 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5180 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5181 5182 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5183 if (mConverter[kPortIndexOutput] != NULL) { 5184 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5185 } 5186 } 5187 5188 if (mTunneled) { 5189 sendFormatChange(); 5190 } 5191} 5192 5193void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> ¬ify) { 5194 AString mime; 5195 CHECK(mOutputFormat->findString("mime", &mime)); 5196 5197 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) { 5198 // notify renderer of the crop change and dataspace change 5199 // NOTE: native window uses extended right-bottom coordinate 5200 int32_t left, top, right, bottom; 5201 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) { 5202 notify->setRect("crop", left, top, right + 1, bottom + 1); 5203 } 5204 5205 int32_t dataSpace; 5206 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) { 5207 notify->setInt32("dataspace", dataSpace); 5208 } 5209 } 5210} 5211 5212void ACodec::sendFormatChange() { 5213 AString mime; 5214 CHECK(mOutputFormat->findString("mime", &mime)); 5215 5216 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 5217 int32_t channelCount, sampleRate; 5218 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 5219 CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate)); 5220 if (mSampleRate != 0 && sampleRate != 0) { 5221 mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate; 5222 mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate; 5223 mSampleRate = sampleRate; 5224 } 5225 if (mSkipCutBuffer != NULL) { 5226 size_t prevbufsize = mSkipCutBuffer->size(); 5227 if (prevbufsize != 0) { 5228 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 5229 } 5230 } 5231 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 5232 } 5233 5234 sp<AMessage> notify = mNotify->dup(); 5235 notify->setInt32("what", kWhatOutputFormatChanged); 5236 notify->setMessage("format", mOutputFormat); 5237 notify->post(); 5238 5239 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 5240 mLastOutputFormat = mOutputFormat; 5241} 5242 5243void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 5244 
sp<AMessage> notify = mNotify->dup(); 5245 notify->setInt32("what", CodecBase::kWhatError); 5246 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 5247 5248 if (internalError == UNKNOWN_ERROR) { // find better error code 5249 const status_t omxStatus = statusFromOMXError(error); 5250 if (omxStatus != 0) { 5251 internalError = omxStatus; 5252 } else { 5253 ALOGW("Invalid OMX error %#x", error); 5254 } 5255 } 5256 5257 mFatalError = true; 5258 5259 notify->setInt32("err", internalError); 5260 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 5261 notify->post(); 5262} 5263 5264//////////////////////////////////////////////////////////////////////////////// 5265 5266ACodec::PortDescription::PortDescription() { 5267} 5268 5269status_t ACodec::requestIDRFrame() { 5270 if (!mIsEncoder) { 5271 return ERROR_UNSUPPORTED; 5272 } 5273 5274 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5275 InitOMXParams(¶ms); 5276 5277 params.nPortIndex = kPortIndexOutput; 5278 params.IntraRefreshVOP = OMX_TRUE; 5279 5280 return mOMX->setConfig( 5281 mNode, 5282 OMX_IndexConfigVideoIntraVOPRefresh, 5283 ¶ms, 5284 sizeof(params)); 5285} 5286 5287void ACodec::PortDescription::addBuffer( 5288 IOMX::buffer_id id, const sp<ABuffer> &buffer, 5289 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) { 5290 mBufferIDs.push_back(id); 5291 mBuffers.push_back(buffer); 5292 mHandles.push_back(handle); 5293 mMemRefs.push_back(memRef); 5294} 5295 5296size_t ACodec::PortDescription::countBuffers() { 5297 return mBufferIDs.size(); 5298} 5299 5300IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5301 return mBufferIDs.itemAt(index); 5302} 5303 5304sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5305 return mBuffers.itemAt(index); 5306} 5307 5308sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const { 5309 return mHandles.itemAt(index); 5310} 5311 5312sp<RefBase> 
ACodec::PortDescription::memRefAt(size_t index) const { 5313 return mMemRefs.itemAt(index); 5314} 5315 5316//////////////////////////////////////////////////////////////////////////////// 5317 5318ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5319 : AState(parentState), 5320 mCodec(codec) { 5321} 5322 5323ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5324 OMX_U32 /* portIndex */) { 5325 return KEEP_BUFFERS; 5326} 5327 5328bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5329 switch (msg->what()) { 5330 case kWhatInputBufferFilled: 5331 { 5332 onInputBufferFilled(msg); 5333 break; 5334 } 5335 5336 case kWhatOutputBufferDrained: 5337 { 5338 onOutputBufferDrained(msg); 5339 break; 5340 } 5341 5342 case ACodec::kWhatOMXMessageList: 5343 { 5344 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5345 } 5346 5347 case ACodec::kWhatOMXMessageItem: 5348 { 5349 // no need to check as we already did it for kWhatOMXMessageList 5350 return onOMXMessage(msg); 5351 } 5352 5353 case ACodec::kWhatOMXMessage: 5354 { 5355 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5356 } 5357 5358 case ACodec::kWhatSetSurface: 5359 { 5360 sp<AReplyToken> replyID; 5361 CHECK(msg->senderAwaitsResponse(&replyID)); 5362 5363 sp<RefBase> obj; 5364 CHECK(msg->findObject("surface", &obj)); 5365 5366 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5367 5368 sp<AMessage> response = new AMessage; 5369 response->setInt32("err", err); 5370 response->postReply(replyID); 5371 break; 5372 } 5373 5374 case ACodec::kWhatCreateInputSurface: 5375 case ACodec::kWhatSetInputSurface: 5376 case ACodec::kWhatSignalEndOfInputStream: 5377 { 5378 // This may result in an app illegal state exception. 
5379 ALOGE("Message 0x%x was not handled", msg->what()); 5380 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5381 return true; 5382 } 5383 5384 case ACodec::kWhatOMXDied: 5385 { 5386 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5387 ALOGE("OMX/mediaserver died, signalling error!"); 5388 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5389 break; 5390 } 5391 5392 case ACodec::kWhatReleaseCodecInstance: 5393 { 5394 ALOGI("[%s] forcing the release of codec", 5395 mCodec->mComponentName.c_str()); 5396 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5397 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5398 mCodec->mComponentName.c_str(), err); 5399 sp<AMessage> notify = mCodec->mNotify->dup(); 5400 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5401 notify->post(); 5402 break; 5403 } 5404 5405 default: 5406 return false; 5407 } 5408 5409 return true; 5410} 5411 5412bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5413 // there is a possibility that this is an outstanding message for a 5414 // codec that we have already destroyed 5415 if (mCodec->mNode == 0) { 5416 ALOGI("ignoring message as already freed component: %s", 5417 msg->debugString().c_str()); 5418 return false; 5419 } 5420 5421 IOMX::node_id nodeID; 5422 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5423 if (nodeID != mCodec->mNode) { 5424 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5425 return false; 5426 } 5427 return true; 5428} 5429 5430bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5431 sp<RefBase> obj; 5432 CHECK(msg->findObject("messages", &obj)); 5433 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5434 5435 bool receivedRenderedEvents = false; 5436 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5437 it != msgList->getList().cend(); ++it) { 5438 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 
5439 mCodec->handleMessage(*it); 5440 int32_t type; 5441 CHECK((*it)->findInt32("type", &type)); 5442 if (type == omx_message::FRAME_RENDERED) { 5443 receivedRenderedEvents = true; 5444 } 5445 } 5446 5447 if (receivedRenderedEvents) { 5448 // NOTE: all buffers are rendered in this case 5449 mCodec->notifyOfRenderedFrames(); 5450 } 5451 return true; 5452} 5453 5454bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5455 int32_t type; 5456 CHECK(msg->findInt32("type", &type)); 5457 5458 switch (type) { 5459 case omx_message::EVENT: 5460 { 5461 int32_t event, data1, data2; 5462 CHECK(msg->findInt32("event", &event)); 5463 CHECK(msg->findInt32("data1", &data1)); 5464 CHECK(msg->findInt32("data2", &data2)); 5465 5466 if (event == OMX_EventCmdComplete 5467 && data1 == OMX_CommandFlush 5468 && data2 == (int32_t)OMX_ALL) { 5469 // Use of this notification is not consistent across 5470 // implementations. We'll drop this notification and rely 5471 // on flush-complete notifications on the individual port 5472 // indices instead. 
5473 5474 return true; 5475 } 5476 5477 return onOMXEvent( 5478 static_cast<OMX_EVENTTYPE>(event), 5479 static_cast<OMX_U32>(data1), 5480 static_cast<OMX_U32>(data2)); 5481 } 5482 5483 case omx_message::EMPTY_BUFFER_DONE: 5484 { 5485 IOMX::buffer_id bufferID; 5486 int32_t fenceFd; 5487 5488 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5489 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5490 5491 return onOMXEmptyBufferDone(bufferID, fenceFd); 5492 } 5493 5494 case omx_message::FILL_BUFFER_DONE: 5495 { 5496 IOMX::buffer_id bufferID; 5497 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5498 5499 int32_t rangeOffset, rangeLength, flags, fenceFd; 5500 int64_t timeUs; 5501 5502 CHECK(msg->findInt32("range_offset", &rangeOffset)); 5503 CHECK(msg->findInt32("range_length", &rangeLength)); 5504 CHECK(msg->findInt32("flags", &flags)); 5505 CHECK(msg->findInt64("timestamp", &timeUs)); 5506 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5507 5508 return onOMXFillBufferDone( 5509 bufferID, 5510 (size_t)rangeOffset, (size_t)rangeLength, 5511 (OMX_U32)flags, 5512 timeUs, 5513 fenceFd); 5514 } 5515 5516 case omx_message::FRAME_RENDERED: 5517 { 5518 int64_t mediaTimeUs, systemNano; 5519 5520 CHECK(msg->findInt64("media_time_us", &mediaTimeUs)); 5521 CHECK(msg->findInt64("system_nano", &systemNano)); 5522 5523 return onOMXFrameRendered( 5524 mediaTimeUs, systemNano); 5525 } 5526 5527 default: 5528 ALOGE("Unexpected message type: %d", type); 5529 return false; 5530 } 5531} 5532 5533bool ACodec::BaseState::onOMXFrameRendered( 5534 int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) { 5535 // ignore outside of Executing and PortSettingsChanged states 5536 return true; 5537} 5538 5539bool ACodec::BaseState::onOMXEvent( 5540 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5541 if (event == OMX_EventDataSpaceChanged) { 5542 ColorAspects aspects; 5543 aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF); 5544 aspects.mPrimaries = 
(ColorAspects::Primaries)((data2 >> 16) & 0xFF); 5545 aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF); 5546 aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF); 5547 5548 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects); 5549 return true; 5550 } 5551 5552 if (event != OMX_EventError) { 5553 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)", 5554 mCodec->mComponentName.c_str(), event, data1, data2); 5555 5556 return false; 5557 } 5558 5559 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1); 5560 5561 // verify OMX component sends back an error we expect. 5562 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 5563 if (!isOMXError(omxError)) { 5564 ALOGW("Invalid OMX error %#x", omxError); 5565 omxError = OMX_ErrorUndefined; 5566 } 5567 mCodec->signalError(omxError); 5568 5569 return true; 5570} 5571 5572bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { 5573 ALOGV("[%s] onOMXEmptyBufferDone %u", 5574 mCodec->mComponentName.c_str(), bufferID); 5575 5576 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5577 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5578 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5579 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5580 mCodec->dumpBuffers(kPortIndexInput); 5581 if (fenceFd >= 0) { 5582 ::close(fenceFd); 5583 } 5584 return false; 5585 } 5586 info->mStatus = BufferInfo::OWNED_BY_US; 5587 5588 // input buffers cannot take fences, so wait for any fence now 5589 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); 5590 fenceFd = -1; 5591 5592 // still save fence for completeness 5593 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); 5594 5595 // We're in "store-metadata-in-buffers" mode, the underlying 5596 // OMX component had access to data that's implicitly refcounted 5597 // by this "MediaBuffer" object. 
Now that the OMX component has 5598 // told us that it's done with the input buffer, we can decrement 5599 // the mediaBuffer's reference count. 5600 info->mData->setMediaBufferBase(NULL); 5601 5602 PortMode mode = getPortMode(kPortIndexInput); 5603 5604 switch (mode) { 5605 case KEEP_BUFFERS: 5606 break; 5607 5608 case RESUBMIT_BUFFERS: 5609 postFillThisBuffer(info); 5610 break; 5611 5612 case FREE_BUFFERS: 5613 default: 5614 ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers"); 5615 return false; 5616 } 5617 5618 return true; 5619} 5620 5621void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 5622 if (mCodec->mPortEOS[kPortIndexInput]) { 5623 return; 5624 } 5625 5626 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 5627 5628 sp<AMessage> notify = mCodec->mNotify->dup(); 5629 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 5630 notify->setInt32("buffer-id", info->mBufferID); 5631 5632 info->mData->meta()->clear(); 5633 notify->setBuffer("buffer", info->mData); 5634 5635 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); 5636 reply->setInt32("buffer-id", info->mBufferID); 5637 5638 notify->setMessage("reply", reply); 5639 5640 notify->post(); 5641 5642 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 5643} 5644 5645void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 5646 IOMX::buffer_id bufferID; 5647 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5648 sp<ABuffer> buffer; 5649 int32_t err = OK; 5650 bool eos = false; 5651 PortMode mode = getPortMode(kPortIndexInput); 5652 5653 if (!msg->findBuffer("buffer", &buffer)) { 5654 /* these are unfilled buffers returned by client */ 5655 CHECK(msg->findInt32("err", &err)); 5656 5657 if (err == OK) { 5658 /* buffers with no errors are returned on MediaCodec.flush */ 5659 mode = KEEP_BUFFERS; 5660 } else { 5661 ALOGV("[%s] saw error %d instead of an input buffer", 5662 mCodec->mComponentName.c_str(), err); 5663 eos = true; 5664 } 5665 5666 
buffer.clear(); 5667 } 5668 5669 int32_t tmp; 5670 if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { 5671 eos = true; 5672 err = ERROR_END_OF_STREAM; 5673 } 5674 5675 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5676 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5677 if (status != BufferInfo::OWNED_BY_UPSTREAM) { 5678 ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID); 5679 mCodec->dumpBuffers(kPortIndexInput); 5680 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5681 return; 5682 } 5683 5684 info->mStatus = BufferInfo::OWNED_BY_US; 5685 5686 switch (mode) { 5687 case KEEP_BUFFERS: 5688 { 5689 if (eos) { 5690 if (!mCodec->mPortEOS[kPortIndexInput]) { 5691 mCodec->mPortEOS[kPortIndexInput] = true; 5692 mCodec->mInputEOSResult = err; 5693 } 5694 } 5695 break; 5696 } 5697 5698 case RESUBMIT_BUFFERS: 5699 { 5700 if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) { 5701 // Do not send empty input buffer w/o EOS to the component. 5702 if (buffer->size() == 0 && !eos) { 5703 postFillThisBuffer(info); 5704 break; 5705 } 5706 5707 int64_t timeUs; 5708 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 5709 5710 OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; 5711 5712 int32_t isCSD; 5713 if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) { 5714 if (mCodec->mIsLegacyVP9Decoder) { 5715 ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data", 5716 mCodec->mComponentName.c_str(), bufferID); 5717 postFillThisBuffer(info); 5718 break; 5719 } 5720 flags |= OMX_BUFFERFLAG_CODECCONFIG; 5721 } 5722 5723 if (eos) { 5724 flags |= OMX_BUFFERFLAG_EOS; 5725 } 5726 5727 if (buffer != info->mCodecData) { 5728 ALOGV("[%s] Needs to copy input data for buffer %u. 
(%p != %p)", 5729 mCodec->mComponentName.c_str(), 5730 bufferID, 5731 buffer.get(), info->mCodecData.get()); 5732 5733 sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput]; 5734 if (converter == NULL) { 5735 converter = getCopyConverter(); 5736 } 5737 status_t err = converter->convert(buffer, info->mCodecData); 5738 if (err != OK) { 5739 mCodec->signalError(OMX_ErrorUndefined, err); 5740 return; 5741 } 5742 } 5743 5744 if (flags & OMX_BUFFERFLAG_CODECCONFIG) { 5745 ALOGV("[%s] calling emptyBuffer %u w/ codec specific data", 5746 mCodec->mComponentName.c_str(), bufferID); 5747 } else if (flags & OMX_BUFFERFLAG_EOS) { 5748 ALOGV("[%s] calling emptyBuffer %u w/ EOS", 5749 mCodec->mComponentName.c_str(), bufferID); 5750 } else { 5751#if TRACK_BUFFER_TIMING 5752 ALOGI("[%s] calling emptyBuffer %u w/ time %lld us", 5753 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5754#else 5755 ALOGV("[%s] calling emptyBuffer %u w/ time %lld us", 5756 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5757#endif 5758 } 5759 5760#if TRACK_BUFFER_TIMING 5761 ACodec::BufferStats stats; 5762 stats.mEmptyBufferTimeUs = ALooper::GetNowUs(); 5763 stats.mFillBufferDoneTimeUs = -1ll; 5764 mCodec->mBufferStats.add(timeUs, stats); 5765#endif 5766 5767 if (mCodec->storingMetadataInDecodedBuffers()) { 5768 // try to submit an output buffer for each input buffer 5769 PortMode outputMode = getPortMode(kPortIndexOutput); 5770 5771 ALOGV("MetadataBuffersToSubmit=%u portMode=%s", 5772 mCodec->mMetadataBuffersToSubmit, 5773 (outputMode == FREE_BUFFERS ? "FREE" : 5774 outputMode == KEEP_BUFFERS ? 
"KEEP" : "RESUBMIT")); 5775 if (outputMode == RESUBMIT_BUFFERS) { 5776 mCodec->submitOutputMetadataBuffer(); 5777 } 5778 } 5779 info->checkReadFence("onInputBufferFilled"); 5780 status_t err2 = mCodec->mOMX->emptyBuffer( 5781 mCodec->mNode, 5782 bufferID, 5783 0, 5784 info->mCodecData->size(), 5785 flags, 5786 timeUs, 5787 info->mFenceFd); 5788 info->mFenceFd = -1; 5789 if (err2 != OK) { 5790 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5791 return; 5792 } 5793 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5794 5795 if (!eos && err == OK) { 5796 getMoreInputDataIfPossible(); 5797 } else { 5798 ALOGV("[%s] Signalled EOS (%d) on the input port", 5799 mCodec->mComponentName.c_str(), err); 5800 5801 mCodec->mPortEOS[kPortIndexInput] = true; 5802 mCodec->mInputEOSResult = err; 5803 } 5804 } else if (!mCodec->mPortEOS[kPortIndexInput]) { 5805 if (err != OK && err != ERROR_END_OF_STREAM) { 5806 ALOGV("[%s] Signalling EOS on the input port due to error %d", 5807 mCodec->mComponentName.c_str(), err); 5808 } else { 5809 ALOGV("[%s] Signalling EOS on the input port", 5810 mCodec->mComponentName.c_str()); 5811 } 5812 5813 ALOGV("[%s] calling emptyBuffer %u signalling EOS", 5814 mCodec->mComponentName.c_str(), bufferID); 5815 5816 info->checkReadFence("onInputBufferFilled"); 5817 status_t err2 = mCodec->mOMX->emptyBuffer( 5818 mCodec->mNode, 5819 bufferID, 5820 0, 5821 0, 5822 OMX_BUFFERFLAG_EOS, 5823 0, 5824 info->mFenceFd); 5825 info->mFenceFd = -1; 5826 if (err2 != OK) { 5827 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5828 return; 5829 } 5830 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5831 5832 mCodec->mPortEOS[kPortIndexInput] = true; 5833 mCodec->mInputEOSResult = err; 5834 } 5835 break; 5836 } 5837 5838 case FREE_BUFFERS: 5839 break; 5840 5841 default: 5842 ALOGE("invalid port mode: %d", mode); 5843 break; 5844 } 5845} 5846 5847void ACodec::BaseState::getMoreInputDataIfPossible() { 5848 if 
(mCodec->mPortEOS[kPortIndexInput]) { 5849 return; 5850 } 5851 5852 BufferInfo *eligible = NULL; 5853 5854 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 5855 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 5856 5857#if 0 5858 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { 5859 // There's already a "read" pending. 5860 return; 5861 } 5862#endif 5863 5864 if (info->mStatus == BufferInfo::OWNED_BY_US) { 5865 eligible = info; 5866 } 5867 } 5868 5869 if (eligible == NULL) { 5870 return; 5871 } 5872 5873 postFillThisBuffer(eligible); 5874} 5875 5876bool ACodec::BaseState::onOMXFillBufferDone( 5877 IOMX::buffer_id bufferID, 5878 size_t rangeOffset, size_t rangeLength, 5879 OMX_U32 flags, 5880 int64_t timeUs, 5881 int fenceFd) { 5882 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x", 5883 mCodec->mComponentName.c_str(), bufferID, timeUs, flags); 5884 5885 ssize_t index; 5886 status_t err= OK; 5887 5888#if TRACK_BUFFER_TIMING 5889 index = mCodec->mBufferStats.indexOfKey(timeUs); 5890 if (index >= 0) { 5891 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index); 5892 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs(); 5893 5894 ALOGI("frame PTS %lld: %lld", 5895 timeUs, 5896 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs); 5897 5898 mCodec->mBufferStats.removeItemsAt(index); 5899 stats = NULL; 5900 } 5901#endif 5902 5903 BufferInfo *info = 5904 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5905 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5906 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5907 ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5908 mCodec->dumpBuffers(kPortIndexOutput); 5909 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5910 if (fenceFd >= 0) { 5911 ::close(fenceFd); 5912 } 5913 return true; 5914 } 5915 5916 info->mDequeuedAt = ++mCodec->mDequeueCounter; 5917 info->mStatus = 
BufferInfo::OWNED_BY_US; 5918 5919 if (info->mRenderInfo != NULL) { 5920 // The fence for an emptied buffer must have signaled, but there still could be queued 5921 // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these, 5922 // as we will soon requeue this buffer to the surface. While in theory we could still keep 5923 // track of buffers that are requeued to the surface, it is better to add support to the 5924 // buffer-queue to notify us of released buffers and their fences (in the future). 5925 mCodec->notifyOfRenderedFrames(true /* dropIncomplete */); 5926 } 5927 5928 // byte buffers cannot take fences, so wait for any fence now 5929 if (mCodec->mNativeWindow == NULL) { 5930 (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone"); 5931 fenceFd = -1; 5932 } 5933 info->setReadFence(fenceFd, "onOMXFillBufferDone"); 5934 5935 PortMode mode = getPortMode(kPortIndexOutput); 5936 5937 switch (mode) { 5938 case KEEP_BUFFERS: 5939 break; 5940 5941 case RESUBMIT_BUFFERS: 5942 { 5943 if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS) 5944 || mCodec->mPortEOS[kPortIndexOutput])) { 5945 ALOGV("[%s] calling fillBuffer %u", 5946 mCodec->mComponentName.c_str(), info->mBufferID); 5947 5948 err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 5949 info->mFenceFd = -1; 5950 if (err != OK) { 5951 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5952 return true; 5953 } 5954 5955 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5956 break; 5957 } 5958 5959 sp<AMessage> reply = 5960 new AMessage(kWhatOutputBufferDrained, mCodec); 5961 5962 if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) { 5963 // pretend that output format has changed on the first frame (we used to do this) 5964 if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) { 5965 mCodec->onOutputFormatChanged(mCodec->mOutputFormat); 5966 } 5967 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 
5968 mCodec->sendFormatChange(); 5969 } 5970 5971 if (mCodec->usingMetadataOnEncoderOutput()) { 5972 native_handle_t *handle = NULL; 5973 VideoNativeHandleMetadata &nativeMeta = 5974 *(VideoNativeHandleMetadata *)info->mData->data(); 5975 if (info->mData->size() >= sizeof(nativeMeta) 5976 && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) { 5977#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 5978 // handle is only valid on 32-bit/mediaserver process 5979 handle = NULL; 5980#else 5981 handle = (native_handle_t *)nativeMeta.pHandle; 5982#endif 5983 } 5984 info->mData->meta()->setPointer("handle", handle); 5985 info->mData->meta()->setInt32("rangeOffset", rangeOffset); 5986 info->mData->meta()->setInt32("rangeLength", rangeLength); 5987 } else if (info->mData == info->mCodecData) { 5988 info->mData->setRange(rangeOffset, rangeLength); 5989 } else { 5990 info->mCodecData->setRange(rangeOffset, rangeLength); 5991 // in this case we know that mConverter is not null 5992 status_t err = mCodec->mConverter[kPortIndexOutput]->convert( 5993 info->mCodecData, info->mData); 5994 if (err != OK) { 5995 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5996 return true; 5997 } 5998 } 5999#if 0 6000 if (mCodec->mNativeWindow == NULL) { 6001 if (IsIDR(info->mData)) { 6002 ALOGI("IDR frame"); 6003 } 6004 } 6005#endif 6006 6007 if (mCodec->mSkipCutBuffer != NULL) { 6008 mCodec->mSkipCutBuffer->submit(info->mData); 6009 } 6010 info->mData->meta()->setInt64("timeUs", timeUs); 6011 6012 sp<AMessage> notify = mCodec->mNotify->dup(); 6013 notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); 6014 notify->setInt32("buffer-id", info->mBufferID); 6015 notify->setBuffer("buffer", info->mData); 6016 notify->setInt32("flags", flags); 6017 6018 reply->setInt32("buffer-id", info->mBufferID); 6019 6020 notify->setMessage("reply", reply); 6021 6022 notify->post(); 6023 6024 info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM; 6025 6026 if (flags & 
OMX_BUFFERFLAG_EOS) { 6027 ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str()); 6028 6029 sp<AMessage> notify = mCodec->mNotify->dup(); 6030 notify->setInt32("what", CodecBase::kWhatEOS); 6031 notify->setInt32("err", mCodec->mInputEOSResult); 6032 notify->post(); 6033 6034 mCodec->mPortEOS[kPortIndexOutput] = true; 6035 } 6036 break; 6037 } 6038 6039 case FREE_BUFFERS: 6040 err = mCodec->freeBuffer(kPortIndexOutput, index); 6041 if (err != OK) { 6042 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6043 return true; 6044 } 6045 break; 6046 6047 default: 6048 ALOGE("Invalid port mode: %d", mode); 6049 return false; 6050 } 6051 6052 return true; 6053} 6054 6055void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { 6056 IOMX::buffer_id bufferID; 6057 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 6058 ssize_t index; 6059 BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 6060 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 6061 if (status != BufferInfo::OWNED_BY_DOWNSTREAM) { 6062 ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 6063 mCodec->dumpBuffers(kPortIndexOutput); 6064 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6065 return; 6066 } 6067 6068 android_native_rect_t crop; 6069 if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) { 6070 status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop); 6071 ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err); 6072 } 6073 6074 int32_t dataSpace; 6075 if (msg->findInt32("dataspace", &dataSpace)) { 6076 status_t err = native_window_set_buffers_data_space( 6077 mCodec->mNativeWindow.get(), (android_dataspace)dataSpace); 6078 ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err); 6079 } 6080 6081 int32_t render; 6082 if (mCodec->mNativeWindow != NULL 6083 && msg->findInt32("render", &render) && render != 0 6084 && 
info->mData != NULL && info->mData->size() != 0) { 6085 ATRACE_NAME("render"); 6086 // The client wants this buffer to be rendered. 6087 6088 // save buffers sent to the surface so we can get render time when they return 6089 int64_t mediaTimeUs = -1; 6090 info->mData->meta()->findInt64("timeUs", &mediaTimeUs); 6091 if (mediaTimeUs >= 0) { 6092 mCodec->mRenderTracker.onFrameQueued( 6093 mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd))); 6094 } 6095 6096 int64_t timestampNs = 0; 6097 if (!msg->findInt64("timestampNs", ×tampNs)) { 6098 // use media timestamp if client did not request a specific render timestamp 6099 if (info->mData->meta()->findInt64("timeUs", ×tampNs)) { 6100 ALOGV("using buffer PTS of %lld", (long long)timestampNs); 6101 timestampNs *= 1000; 6102 } 6103 } 6104 6105 status_t err; 6106 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); 6107 ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err); 6108 6109 info->checkReadFence("onOutputBufferDrained before queueBuffer"); 6110 err = mCodec->mNativeWindow->queueBuffer( 6111 mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 6112 info->mFenceFd = -1; 6113 if (err == OK) { 6114 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 6115 } else { 6116 ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err); 6117 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6118 info->mStatus = BufferInfo::OWNED_BY_US; 6119 // keeping read fence as write fence to avoid clobbering 6120 info->mIsReadFence = false; 6121 } 6122 } else { 6123 if (mCodec->mNativeWindow != NULL && 6124 (info->mData == NULL || info->mData->size() != 0)) { 6125 // move read fence into write fence to avoid clobbering 6126 info->mIsReadFence = false; 6127 ATRACE_NAME("frame-drop"); 6128 } 6129 info->mStatus = BufferInfo::OWNED_BY_US; 6130 } 6131 6132 PortMode mode = getPortMode(kPortIndexOutput); 6133 6134 switch (mode) { 6135 case 
KEEP_BUFFERS: 6136 { 6137 // XXX fishy, revisit!!! What about the FREE_BUFFERS case below? 6138 6139 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6140 // We cannot resubmit the buffer we just rendered, dequeue 6141 // the spare instead. 6142 6143 info = mCodec->dequeueBufferFromNativeWindow(); 6144 } 6145 break; 6146 } 6147 6148 case RESUBMIT_BUFFERS: 6149 { 6150 if (!mCodec->mPortEOS[kPortIndexOutput]) { 6151 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6152 // We cannot resubmit the buffer we just rendered, dequeue 6153 // the spare instead. 6154 6155 info = mCodec->dequeueBufferFromNativeWindow(); 6156 } 6157 6158 if (info != NULL) { 6159 ALOGV("[%s] calling fillBuffer %u", 6160 mCodec->mComponentName.c_str(), info->mBufferID); 6161 info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS"); 6162 status_t err = mCodec->mOMX->fillBuffer( 6163 mCodec->mNode, info->mBufferID, info->mFenceFd); 6164 info->mFenceFd = -1; 6165 if (err == OK) { 6166 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6167 } else { 6168 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6169 } 6170 } 6171 } 6172 break; 6173 } 6174 6175 case FREE_BUFFERS: 6176 { 6177 status_t err = mCodec->freeBuffer(kPortIndexOutput, index); 6178 if (err != OK) { 6179 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6180 } 6181 break; 6182 } 6183 6184 default: 6185 ALOGE("Invalid port mode: %d", mode); 6186 return; 6187 } 6188} 6189 6190//////////////////////////////////////////////////////////////////////////////// 6191 6192ACodec::UninitializedState::UninitializedState(ACodec *codec) 6193 : BaseState(codec) { 6194} 6195 6196void ACodec::UninitializedState::stateEntered() { 6197 ALOGV("Now uninitialized"); 6198 6199 if (mDeathNotifier != NULL) { 6200 mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier); 6201 mDeathNotifier.clear(); 6202 } 6203 6204 mCodec->mUsingNativeWindow = false; 6205 mCodec->mNativeWindow.clear(); 6206 
mCodec->mNativeWindowUsageBits = 0; 6207 mCodec->mNode = 0; 6208 mCodec->mOMX.clear(); 6209 mCodec->mQuirks = 0; 6210 mCodec->mFlags = 0; 6211 mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; 6212 mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; 6213 mCodec->mConverter[0].clear(); 6214 mCodec->mConverter[1].clear(); 6215 mCodec->mComponentName.clear(); 6216} 6217 6218bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 6219 bool handled = false; 6220 6221 switch (msg->what()) { 6222 case ACodec::kWhatSetup: 6223 { 6224 onSetup(msg); 6225 6226 handled = true; 6227 break; 6228 } 6229 6230 case ACodec::kWhatAllocateComponent: 6231 { 6232 onAllocateComponent(msg); 6233 handled = true; 6234 break; 6235 } 6236 6237 case ACodec::kWhatShutdown: 6238 { 6239 int32_t keepComponentAllocated; 6240 CHECK(msg->findInt32( 6241 "keepComponentAllocated", &keepComponentAllocated)); 6242 ALOGW_IF(keepComponentAllocated, 6243 "cannot keep component allocated on shutdown in Uninitialized state"); 6244 6245 sp<AMessage> notify = mCodec->mNotify->dup(); 6246 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6247 notify->post(); 6248 6249 handled = true; 6250 break; 6251 } 6252 6253 case ACodec::kWhatFlush: 6254 { 6255 sp<AMessage> notify = mCodec->mNotify->dup(); 6256 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6257 notify->post(); 6258 6259 handled = true; 6260 break; 6261 } 6262 6263 case ACodec::kWhatReleaseCodecInstance: 6264 { 6265 // nothing to do, as we have already signaled shutdown 6266 handled = true; 6267 break; 6268 } 6269 6270 default: 6271 return BaseState::onMessageReceived(msg); 6272 } 6273 6274 return handled; 6275} 6276 6277void ACodec::UninitializedState::onSetup( 6278 const sp<AMessage> &msg) { 6279 if (onAllocateComponent(msg) 6280 && mCodec->mLoadedState->onConfigureComponent(msg)) { 6281 mCodec->mLoadedState->onStart(); 6282 } 6283} 6284 6285bool ACodec::UninitializedState::onAllocateComponent(const 
sp<AMessage> &msg) { 6286 ALOGV("onAllocateComponent"); 6287 6288 CHECK(mCodec->mNode == 0); 6289 6290 OMXClient client; 6291 if (client.connect() != OK) { 6292 mCodec->signalError(OMX_ErrorUndefined, NO_INIT); 6293 return false; 6294 } 6295 6296 sp<IOMX> omx = client.interface(); 6297 6298 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec); 6299 6300 Vector<AString> matchingCodecs; 6301 6302 AString mime; 6303 6304 AString componentName; 6305 uint32_t quirks = 0; 6306 int32_t encoder = false; 6307 if (msg->findString("componentName", &componentName)) { 6308 sp<IMediaCodecList> list = MediaCodecList::getInstance(); 6309 if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) { 6310 matchingCodecs.add(componentName); 6311 } 6312 } else { 6313 CHECK(msg->findString("mime", &mime)); 6314 6315 if (!msg->findInt32("encoder", &encoder)) { 6316 encoder = false; 6317 } 6318 6319 MediaCodecList::findMatchingCodecs( 6320 mime.c_str(), 6321 encoder, // createEncoder 6322 0, // flags 6323 &matchingCodecs); 6324 } 6325 6326 sp<CodecObserver> observer = new CodecObserver; 6327 IOMX::node_id node = 0; 6328 6329 status_t err = NAME_NOT_FOUND; 6330 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size(); 6331 ++matchIndex) { 6332 componentName = matchingCodecs[matchIndex]; 6333 quirks = MediaCodecList::getQuirksFor(componentName.c_str()); 6334 6335 pid_t tid = gettid(); 6336 int prevPriority = androidGetThreadPriority(tid); 6337 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); 6338 err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node); 6339 androidSetThreadPriority(tid, prevPriority); 6340 6341 if (err == OK) { 6342 break; 6343 } else { 6344 ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str()); 6345 } 6346 6347 node = 0; 6348 } 6349 6350 if (node == 0) { 6351 if (!mime.empty()) { 6352 ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.", 6353 encoder ? 
"en" : "de", mime.c_str(), err); 6354 } else { 6355 ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err); 6356 } 6357 6358 mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err)); 6359 return false; 6360 } 6361 6362 mDeathNotifier = new DeathNotifier(notify); 6363 if (mCodec->mNodeBinder == NULL || 6364 mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) { 6365 // This was a local binder, if it dies so do we, we won't care 6366 // about any notifications in the afterlife. 6367 mDeathNotifier.clear(); 6368 } 6369 6370 notify = new AMessage(kWhatOMXMessageList, mCodec); 6371 observer->setNotificationMessage(notify); 6372 6373 mCodec->mComponentName = componentName; 6374 mCodec->mRenderTracker.setComponentName(componentName); 6375 mCodec->mFlags = 0; 6376 6377 if (componentName.endsWith(".secure")) { 6378 mCodec->mFlags |= kFlagIsSecure; 6379 mCodec->mFlags |= kFlagIsGrallocUsageProtected; 6380 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 6381 } 6382 6383 mCodec->mQuirks = quirks; 6384 mCodec->mOMX = omx; 6385 mCodec->mNode = node; 6386 6387 { 6388 sp<AMessage> notify = mCodec->mNotify->dup(); 6389 notify->setInt32("what", CodecBase::kWhatComponentAllocated); 6390 notify->setString("componentName", mCodec->mComponentName.c_str()); 6391 notify->post(); 6392 } 6393 6394 mCodec->changeState(mCodec->mLoadedState); 6395 6396 return true; 6397} 6398 6399//////////////////////////////////////////////////////////////////////////////// 6400 6401ACodec::LoadedState::LoadedState(ACodec *codec) 6402 : BaseState(codec) { 6403} 6404 6405void ACodec::LoadedState::stateEntered() { 6406 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); 6407 6408 mCodec->mPortEOS[kPortIndexInput] = 6409 mCodec->mPortEOS[kPortIndexOutput] = false; 6410 6411 mCodec->mInputEOSResult = OK; 6412 6413 mCodec->mDequeueCounter = 0; 6414 mCodec->mMetadataBuffersToSubmit = 0; 6415 mCodec->mRepeatFrameDelayUs = -1ll; 6416 
mCodec->mInputFormat.clear(); 6417 mCodec->mOutputFormat.clear(); 6418 mCodec->mBaseOutputFormat.clear(); 6419 6420 if (mCodec->mShutdownInProgress) { 6421 bool keepComponentAllocated = mCodec->mKeepComponentAllocated; 6422 6423 mCodec->mShutdownInProgress = false; 6424 mCodec->mKeepComponentAllocated = false; 6425 6426 onShutdown(keepComponentAllocated); 6427 } 6428 mCodec->mExplicitShutdown = false; 6429 6430 mCodec->processDeferredMessages(); 6431} 6432 6433void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { 6434 if (!keepComponentAllocated) { 6435 (void)mCodec->mOMX->freeNode(mCodec->mNode); 6436 6437 mCodec->changeState(mCodec->mUninitializedState); 6438 } 6439 6440 if (mCodec->mExplicitShutdown) { 6441 sp<AMessage> notify = mCodec->mNotify->dup(); 6442 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6443 notify->post(); 6444 mCodec->mExplicitShutdown = false; 6445 } 6446} 6447 6448bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { 6449 bool handled = false; 6450 6451 switch (msg->what()) { 6452 case ACodec::kWhatConfigureComponent: 6453 { 6454 onConfigureComponent(msg); 6455 handled = true; 6456 break; 6457 } 6458 6459 case ACodec::kWhatCreateInputSurface: 6460 { 6461 onCreateInputSurface(msg); 6462 handled = true; 6463 break; 6464 } 6465 6466 case ACodec::kWhatSetInputSurface: 6467 { 6468 onSetInputSurface(msg); 6469 handled = true; 6470 break; 6471 } 6472 6473 case ACodec::kWhatStart: 6474 { 6475 onStart(); 6476 handled = true; 6477 break; 6478 } 6479 6480 case ACodec::kWhatShutdown: 6481 { 6482 int32_t keepComponentAllocated; 6483 CHECK(msg->findInt32( 6484 "keepComponentAllocated", &keepComponentAllocated)); 6485 6486 mCodec->mExplicitShutdown = true; 6487 onShutdown(keepComponentAllocated); 6488 6489 handled = true; 6490 break; 6491 } 6492 6493 case ACodec::kWhatFlush: 6494 { 6495 sp<AMessage> notify = mCodec->mNotify->dup(); 6496 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6497 
notify->post(); 6498 6499 handled = true; 6500 break; 6501 } 6502 6503 default: 6504 return BaseState::onMessageReceived(msg); 6505 } 6506 6507 return handled; 6508} 6509 6510bool ACodec::LoadedState::onConfigureComponent( 6511 const sp<AMessage> &msg) { 6512 ALOGV("onConfigureComponent"); 6513 6514 CHECK(mCodec->mNode != 0); 6515 6516 status_t err = OK; 6517 AString mime; 6518 if (!msg->findString("mime", &mime)) { 6519 err = BAD_VALUE; 6520 } else { 6521 err = mCodec->configureCodec(mime.c_str(), msg); 6522 } 6523 if (err != OK) { 6524 ALOGE("[%s] configureCodec returning error %d", 6525 mCodec->mComponentName.c_str(), err); 6526 6527 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6528 return false; 6529 } 6530 6531 { 6532 sp<AMessage> notify = mCodec->mNotify->dup(); 6533 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 6534 notify->setMessage("input-format", mCodec->mInputFormat); 6535 notify->setMessage("output-format", mCodec->mOutputFormat); 6536 notify->post(); 6537 } 6538 6539 return true; 6540} 6541 6542status_t ACodec::LoadedState::setupInputSurface() { 6543 status_t err = OK; 6544 6545 if (mCodec->mRepeatFrameDelayUs > 0ll) { 6546 err = mCodec->mOMX->setInternalOption( 6547 mCodec->mNode, 6548 kPortIndexInput, 6549 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 6550 &mCodec->mRepeatFrameDelayUs, 6551 sizeof(mCodec->mRepeatFrameDelayUs)); 6552 6553 if (err != OK) { 6554 ALOGE("[%s] Unable to configure option to repeat previous " 6555 "frames (err %d)", 6556 mCodec->mComponentName.c_str(), 6557 err); 6558 return err; 6559 } 6560 } 6561 6562 if (mCodec->mMaxPtsGapUs > 0ll) { 6563 err = mCodec->mOMX->setInternalOption( 6564 mCodec->mNode, 6565 kPortIndexInput, 6566 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 6567 &mCodec->mMaxPtsGapUs, 6568 sizeof(mCodec->mMaxPtsGapUs)); 6569 6570 if (err != OK) { 6571 ALOGE("[%s] Unable to configure max timestamp gap (err %d)", 6572 mCodec->mComponentName.c_str(), 6573 err); 6574 
return err; 6575 } 6576 } 6577 6578 if (mCodec->mMaxFps > 0) { 6579 err = mCodec->mOMX->setInternalOption( 6580 mCodec->mNode, 6581 kPortIndexInput, 6582 IOMX::INTERNAL_OPTION_MAX_FPS, 6583 &mCodec->mMaxFps, 6584 sizeof(mCodec->mMaxFps)); 6585 6586 if (err != OK) { 6587 ALOGE("[%s] Unable to configure max fps (err %d)", 6588 mCodec->mComponentName.c_str(), 6589 err); 6590 return err; 6591 } 6592 } 6593 6594 if (mCodec->mTimePerCaptureUs > 0ll 6595 && mCodec->mTimePerFrameUs > 0ll) { 6596 int64_t timeLapse[2]; 6597 timeLapse[0] = mCodec->mTimePerFrameUs; 6598 timeLapse[1] = mCodec->mTimePerCaptureUs; 6599 err = mCodec->mOMX->setInternalOption( 6600 mCodec->mNode, 6601 kPortIndexInput, 6602 IOMX::INTERNAL_OPTION_TIME_LAPSE, 6603 &timeLapse[0], 6604 sizeof(timeLapse)); 6605 6606 if (err != OK) { 6607 ALOGE("[%s] Unable to configure time lapse (err %d)", 6608 mCodec->mComponentName.c_str(), 6609 err); 6610 return err; 6611 } 6612 } 6613 6614 if (mCodec->mCreateInputBuffersSuspended) { 6615 bool suspend = true; 6616 err = mCodec->mOMX->setInternalOption( 6617 mCodec->mNode, 6618 kPortIndexInput, 6619 IOMX::INTERNAL_OPTION_SUSPEND, 6620 &suspend, 6621 sizeof(suspend)); 6622 6623 if (err != OK) { 6624 ALOGE("[%s] Unable to configure option to suspend (err %d)", 6625 mCodec->mComponentName.c_str(), 6626 err); 6627 return err; 6628 } 6629 } 6630 6631 uint32_t usageBits; 6632 if (mCodec->mOMX->getParameter( 6633 mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 6634 &usageBits, sizeof(usageBits)) == OK) { 6635 mCodec->mInputFormat->setInt32( 6636 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 6637 } 6638 6639 sp<ABuffer> colorAspectsBuffer; 6640 if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) { 6641 err = mCodec->mOMX->setInternalOption( 6642 mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS, 6643 colorAspectsBuffer->base(), colorAspectsBuffer->capacity()); 6644 if (err != OK) { 6645 
ALOGE("[%s] Unable to configure color aspects (err %d)", 6646 mCodec->mComponentName.c_str(), err); 6647 return err; 6648 } 6649 } 6650 return OK; 6651} 6652 6653void ACodec::LoadedState::onCreateInputSurface( 6654 const sp<AMessage> & /* msg */) { 6655 ALOGV("onCreateInputSurface"); 6656 6657 sp<AMessage> notify = mCodec->mNotify->dup(); 6658 notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); 6659 6660 android_dataspace dataSpace; 6661 status_t err = 6662 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6663 notify->setMessage("input-format", mCodec->mInputFormat); 6664 notify->setMessage("output-format", mCodec->mOutputFormat); 6665 6666 sp<IGraphicBufferProducer> bufferProducer; 6667 if (err == OK) { 6668 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6669 err = mCodec->mOMX->createInputSurface( 6670 mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, 6671 &mCodec->mInputMetadataType); 6672 // framework uses ANW buffers internally instead of gralloc handles 6673 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6674 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6675 } 6676 } 6677 6678 if (err == OK) { 6679 err = setupInputSurface(); 6680 } 6681 6682 if (err == OK) { 6683 notify->setObject("input-surface", 6684 new BufferProducerWrapper(bufferProducer)); 6685 } else { 6686 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6687 // the error through because it's in the "configured" state. We 6688 // send a kWhatInputSurfaceCreated with an error value instead. 
6689 ALOGE("[%s] onCreateInputSurface returning error %d", 6690 mCodec->mComponentName.c_str(), err); 6691 notify->setInt32("err", err); 6692 } 6693 notify->post(); 6694} 6695 6696void ACodec::LoadedState::onSetInputSurface( 6697 const sp<AMessage> &msg) { 6698 ALOGV("onSetInputSurface"); 6699 6700 sp<AMessage> notify = mCodec->mNotify->dup(); 6701 notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted); 6702 6703 sp<RefBase> obj; 6704 CHECK(msg->findObject("input-surface", &obj)); 6705 sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get()); 6706 6707 android_dataspace dataSpace; 6708 status_t err = 6709 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6710 notify->setMessage("input-format", mCodec->mInputFormat); 6711 notify->setMessage("output-format", mCodec->mOutputFormat); 6712 6713 if (err == OK) { 6714 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6715 err = mCodec->mOMX->setInputSurface( 6716 mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(), 6717 &mCodec->mInputMetadataType); 6718 // framework uses ANW buffers internally instead of gralloc handles 6719 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6720 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6721 } 6722 } 6723 6724 if (err == OK) { 6725 surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace); 6726 err = setupInputSurface(); 6727 } 6728 6729 if (err != OK) { 6730 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6731 // the error through because it's in the "configured" state. We 6732 // send a kWhatInputSurfaceAccepted with an error value instead. 
6733 ALOGE("[%s] onSetInputSurface returning error %d", 6734 mCodec->mComponentName.c_str(), err); 6735 notify->setInt32("err", err); 6736 } 6737 notify->post(); 6738} 6739 6740void ACodec::LoadedState::onStart() { 6741 ALOGV("onStart"); 6742 6743 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 6744 if (err != OK) { 6745 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6746 } else { 6747 mCodec->changeState(mCodec->mLoadedToIdleState); 6748 } 6749} 6750 6751//////////////////////////////////////////////////////////////////////////////// 6752 6753ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec) 6754 : BaseState(codec) { 6755} 6756 6757void ACodec::LoadedToIdleState::stateEntered() { 6758 ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str()); 6759 6760 status_t err; 6761 if ((err = allocateBuffers()) != OK) { 6762 ALOGE("Failed to allocate buffers after transitioning to IDLE state " 6763 "(error 0x%08x)", 6764 err); 6765 6766 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6767 6768 mCodec->mOMX->sendCommand( 6769 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 6770 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) { 6771 mCodec->freeBuffersOnPort(kPortIndexInput); 6772 } 6773 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) { 6774 mCodec->freeBuffersOnPort(kPortIndexOutput); 6775 } 6776 6777 mCodec->changeState(mCodec->mLoadedState); 6778 } 6779} 6780 6781status_t ACodec::LoadedToIdleState::allocateBuffers() { 6782 status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput); 6783 6784 if (err != OK) { 6785 return err; 6786 } 6787 6788 return mCodec->allocateBuffersOnPort(kPortIndexOutput); 6789} 6790 6791bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { 6792 switch (msg->what()) { 6793 case kWhatSetParameters: 6794 case kWhatShutdown: 6795 { 6796 mCodec->deferMessage(msg); 6797 return true; 6798 } 6799 6800 
case kWhatSignalEndOfInputStream: 6801 { 6802 mCodec->onSignalEndOfInputStream(); 6803 return true; 6804 } 6805 6806 case kWhatResume: 6807 { 6808 // We'll be active soon enough. 6809 return true; 6810 } 6811 6812 case kWhatFlush: 6813 { 6814 // We haven't even started yet, so we're flushed alright... 6815 sp<AMessage> notify = mCodec->mNotify->dup(); 6816 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6817 notify->post(); 6818 return true; 6819 } 6820 6821 default: 6822 return BaseState::onMessageReceived(msg); 6823 } 6824} 6825 6826bool ACodec::LoadedToIdleState::onOMXEvent( 6827 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6828 switch (event) { 6829 case OMX_EventCmdComplete: 6830 { 6831 status_t err = OK; 6832 if (data1 != (OMX_U32)OMX_CommandStateSet 6833 || data2 != (OMX_U32)OMX_StateIdle) { 6834 ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)", 6835 asString((OMX_COMMANDTYPE)data1), data1, 6836 asString((OMX_STATETYPE)data2), data2); 6837 err = FAILED_TRANSACTION; 6838 } 6839 6840 if (err == OK) { 6841 err = mCodec->mOMX->sendCommand( 6842 mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting); 6843 } 6844 6845 if (err != OK) { 6846 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6847 } else { 6848 mCodec->changeState(mCodec->mIdleToExecutingState); 6849 } 6850 6851 return true; 6852 } 6853 6854 default: 6855 return BaseState::onOMXEvent(event, data1, data2); 6856 } 6857} 6858 6859//////////////////////////////////////////////////////////////////////////////// 6860 6861ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec) 6862 : BaseState(codec) { 6863} 6864 6865void ACodec::IdleToExecutingState::stateEntered() { 6866 ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str()); 6867} 6868 6869bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6870 switch (msg->what()) { 6871 case kWhatSetParameters: 6872 case kWhatShutdown: 6873 { 6874 
mCodec->deferMessage(msg); 6875 return true; 6876 } 6877 6878 case kWhatResume: 6879 { 6880 // We'll be active soon enough. 6881 return true; 6882 } 6883 6884 case kWhatFlush: 6885 { 6886 // We haven't even started yet, so we're flushed alright... 6887 sp<AMessage> notify = mCodec->mNotify->dup(); 6888 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6889 notify->post(); 6890 6891 return true; 6892 } 6893 6894 case kWhatSignalEndOfInputStream: 6895 { 6896 mCodec->onSignalEndOfInputStream(); 6897 return true; 6898 } 6899 6900 default: 6901 return BaseState::onMessageReceived(msg); 6902 } 6903} 6904 6905bool ACodec::IdleToExecutingState::onOMXEvent( 6906 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6907 switch (event) { 6908 case OMX_EventCmdComplete: 6909 { 6910 if (data1 != (OMX_U32)OMX_CommandStateSet 6911 || data2 != (OMX_U32)OMX_StateExecuting) { 6912 ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)", 6913 asString((OMX_COMMANDTYPE)data1), data1, 6914 asString((OMX_STATETYPE)data2), data2); 6915 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6916 return true; 6917 } 6918 6919 mCodec->mExecutingState->resume(); 6920 mCodec->changeState(mCodec->mExecutingState); 6921 6922 return true; 6923 } 6924 6925 default: 6926 return BaseState::onOMXEvent(event, data1, data2); 6927 } 6928} 6929 6930//////////////////////////////////////////////////////////////////////////////// 6931 6932ACodec::ExecutingState::ExecutingState(ACodec *codec) 6933 : BaseState(codec), 6934 mActive(false) { 6935} 6936 6937ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( 6938 OMX_U32 /* portIndex */) { 6939 return RESUBMIT_BUFFERS; 6940} 6941 6942void ACodec::ExecutingState::submitOutputMetaBuffers() { 6943 // submit as many buffers as there are input buffers with the codec 6944 // in case we are in port reconfiguring 6945 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 6946 BufferInfo *info = 
&mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6947 6948 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 6949 if (mCodec->submitOutputMetadataBuffer() != OK) 6950 break; 6951 } 6952 } 6953 6954 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 6955 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 6956} 6957 6958void ACodec::ExecutingState::submitRegularOutputBuffers() { 6959 bool failed = false; 6960 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 6961 BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i); 6962 6963 if (mCodec->mNativeWindow != NULL) { 6964 if (info->mStatus != BufferInfo::OWNED_BY_US 6965 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6966 ALOGE("buffers should be owned by us or the surface"); 6967 failed = true; 6968 break; 6969 } 6970 6971 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6972 continue; 6973 } 6974 } else { 6975 if (info->mStatus != BufferInfo::OWNED_BY_US) { 6976 ALOGE("buffers should be owned by us"); 6977 failed = true; 6978 break; 6979 } 6980 } 6981 6982 ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID); 6983 6984 info->checkWriteFence("submitRegularOutputBuffers"); 6985 status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 6986 info->mFenceFd = -1; 6987 if (err != OK) { 6988 failed = true; 6989 break; 6990 } 6991 6992 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6993 } 6994 6995 if (failed) { 6996 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6997 } 6998} 6999 7000void ACodec::ExecutingState::submitOutputBuffers() { 7001 submitRegularOutputBuffers(); 7002 if (mCodec->storingMetadataInDecodedBuffers()) { 7003 submitOutputMetaBuffers(); 7004 } 7005} 7006 7007void ACodec::ExecutingState::resume() { 7008 if (mActive) { 7009 ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str()); 7010 return; 7011 } 7012 7013 
submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

// On (re)entering Executing: reset the frame-render tracker and replay any
// messages that were deferred while a state transition was in flight.
void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

// Steady-state message handling: shutdown, flush, resume, IDR requests,
// runtime parameter changes and input-EOS signalling.
bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    // Only signal an error if the component is meant to stay
                    // allocated; otherwise it is being torn down anyway.
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                // Non-fatal; just log the failure.
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply with the result only if the caller asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            // Keep feeding output metadata buffers until output EOS arrives,
            // once input EOS has already been seen.
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime-settable parameters. Every key is optional; the first
// failing setting aborts and returns its error, except intra-refresh-period
// which is best-effort.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMX->setConfig(
                mNode,
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        status_t err =
            mOMX->setInternalOption(
                    mNode,
                    kPortIndexInput,
                    IOMX::INTERNAL_OPTION_START_TIME,
                    &skipFramesBeforeUs,
                    sizeof(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        // Any non-zero value means "suspend input".
        bool suspend = dropInputFrames != 0;

        status_t err =
            mOMX->setInternalOption(
                    mNode,
                    kPortIndexInput,
                    IOMX::INTERNAL_OPTION_SUSPEND,
                    &suspend,
                    sizeof(suspend));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err =
            setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            // Optional key: swallow the error after logging.
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    return OK;
}

// Asks the component to mark end-of-stream on the input port and notifies
// the client; on failure the error code is attached to the notification.
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = mOMX->signalEndOfInputStream(mNode);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// OMX event handling while executing. A port-definition change on the
// output port kicks off the output reconfiguration sequence.
bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                // Full reconfiguration: disable the output port, free the
                // buffers we still hold, and switch state until the port
                // has been re-enabled with fresh buffers.
                mCodec->mMetadataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode,
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                     mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// While reconfiguring: output buffers returning from the component are freed
// (the port is being disabled), input buffers keep circulating.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

// Client requests that cannot be serviced mid-reconfiguration are deferred
// and replayed once we re-enter the Executing state.
bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Drives the output port's disable -> reallocate -> re-enable sequence and
// returns to ExecutingState once the port is running again.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    // All output buffers should have been freed before the
                    // disable completes.
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from excecuting->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                // Re-submit output buffers only if buffer circulation was
                // active before the reconfiguration.
                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

// While transitioning Executing->Idle only a redundant shutdown is accepted;
// flush requests are ignored with a warning.
bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

// Waits for the component's Idle transition to complete; buffer-related
// events are irrelevant during shutdown and are swallowed.
bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component is idle AND every buffer is back with us, request the
// Loaded transition, free all port buffers, and move to IdleToLoadedState.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            // Free both ports even if the first free fails; report the
            // first error encountered.
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            if (err == OK) {
                err = err2;
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display. This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

// While transitioning Idle->Loaded only a redundant shutdown is accepted;
// a flush at this point is a client error.
bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGE("Got flush request in IdleToLoadedState");
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::IdleToLoadedState::stateEntered() {
    ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
}

// Waits for the component's Loaded transition, then returns to LoadedState.
bool ACodec::IdleToLoadedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateLoaded) {
                ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->changeState(mCodec->mLoadedState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::FlushingState::FlushingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::FlushingState::stateEntered() {
    ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());

    // A flush is complete only once BOTH ports have reported completion.
    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
}

// During a flush, shutdown is deferred until the flush finishes and a
// second flush request is a no-op.
bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            break;
        }

        case kWhatFlush:
        {
            // We're already doing this right now.
7632 handled = true; 7633 break; 7634 } 7635 7636 default: 7637 handled = BaseState::onMessageReceived(msg); 7638 break; 7639 } 7640 7641 return handled; 7642} 7643 7644bool ACodec::FlushingState::onOMXEvent( 7645 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7646 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7647 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7648 7649 switch (event) { 7650 case OMX_EventCmdComplete: 7651 { 7652 if (data1 != (OMX_U32)OMX_CommandFlush) { 7653 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7654 asString((OMX_COMMANDTYPE)data1), data1, data2); 7655 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7656 return true; 7657 } 7658 7659 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7660 if (mFlushComplete[data2]) { 7661 ALOGW("Flush already completed for %s port", 7662 data2 == kPortIndexInput ? "input" : "output"); 7663 return true; 7664 } 7665 mFlushComplete[data2] = true; 7666 7667 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7668 changeStateIfWeOwnAllBuffers(); 7669 } 7670 } else if (data2 == OMX_ALL) { 7671 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7672 ALOGW("received flush complete event for OMX_ALL before ports have been" 7673 "flushed (%d/%d)", 7674 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7675 return false; 7676 } 7677 7678 changeStateIfWeOwnAllBuffers(); 7679 } else { 7680 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7681 } 7682 7683 return true; 7684 } 7685 7686 case OMX_EventPortSettingsChanged: 7687 { 7688 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7689 msg->setInt32("type", omx_message::EVENT); 7690 msg->setInt32("node", mCodec->mNode); 7691 msg->setInt32("event", event); 7692 msg->setInt32("data1", data1); 7693 msg->setInt32("data2", data2); 7694 7695 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7696 
mCodec->mComponentName.c_str());

            mCodec->deferMessage(msg);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }

    return true;  // not reached: every case above returns
}

void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

// Completes the flush once both ports have flushed and every buffer is back
// with us: reclaims native-window buffers, resets EOS bookkeeping, notifies
// the client and returns to ExecutingState.
void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
    if (mFlushComplete[kPortIndexInput]
            && mFlushComplete[kPortIndexOutput]
            && mCodec->allYourBuffersAreBelongToUs()) {
        // We now own all buffers except possibly those still queued with
        // the native window for rendering. Let's get those back as well.
        mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();

        mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));

        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatFlushCompleted);
        notify->post();

        mCodec->mPortEOS[kPortIndexInput] =
            mCodec->mPortEOS[kPortIndexOutput] = false;

        mCodec->mInputEOSResult = OK;

        if (mCodec->mSkipCutBuffer != NULL) {
            mCodec->mSkipCutBuffer->clear();
        }

        mCodec->changeState(mCodec->mExecutingState);
    }
}

// Probes a component (by temporarily allocating an OMX node for it) for its
// supported profiles/levels, color formats and feature flags, and returns
// them in *caps. Returns BAD_VALUE for an unknown mime/role, or the first
// connection/allocation error encountered.
status_t ACodec::queryCapabilities(
        const AString &name, const AString &mime, bool isEncoder,
        sp<MediaCodecInfo::Capabilities> *caps) {
    (*caps).clear();
    const char *role = getComponentRole(isEncoder, mime.c_str());
    if (role == NULL) {
        return BAD_VALUE;
    }

    OMXClient client;
    status_t err = client.connect();
    if (err != OK) {
        return err;
    }

    sp<IOMX> omx = client.interface();
    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    err = omx->allocateNode(name.c_str(), observer, NULL, &node);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = setComponentRole(omx, node, role);
    if (err != OK) {
        omx->freeNode(node);
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        // Enumerate supported profile/level pairs until the component stops
        // answering or we hit the enumeration cap.
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        portFormat.nPortIndex = isEncoder ?
                kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            OMX_U32 flexibleEquivalent;
            if (isFlexibleColorFormat(
                    omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                // Add the flexible equivalent once, ahead of the concrete
                // format it stands for.
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        if (param.nProfileIndex == 0) {
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        // Probe decoder features: tunneled playback implies adaptive
        // playback; otherwise metadata mode or adaptive prepare implies
        // adaptive playback only.
        native_handle_t *sidebandHandle = NULL;
        if (omx->configureVideoTunnelMode(
                node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omx->storeMetaDataInBuffers(
                node, kPortIndexOutput, OMX_TRUE) == OK ||
                omx->prepareForAdaptivePlayback(
                node, kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
        if (omx->getConfig(
                node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omx->freeNode(node);
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
7913//static 7914status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7915 switch (numChannels) { 7916 case 1: 7917 map[0] = OMX_AUDIO_ChannelCF; 7918 break; 7919 case 2: 7920 map[0] = OMX_AUDIO_ChannelLF; 7921 map[1] = OMX_AUDIO_ChannelRF; 7922 break; 7923 case 3: 7924 map[0] = OMX_AUDIO_ChannelLF; 7925 map[1] = OMX_AUDIO_ChannelRF; 7926 map[2] = OMX_AUDIO_ChannelCF; 7927 break; 7928 case 4: 7929 map[0] = OMX_AUDIO_ChannelLF; 7930 map[1] = OMX_AUDIO_ChannelRF; 7931 map[2] = OMX_AUDIO_ChannelLR; 7932 map[3] = OMX_AUDIO_ChannelRR; 7933 break; 7934 case 5: 7935 map[0] = OMX_AUDIO_ChannelLF; 7936 map[1] = OMX_AUDIO_ChannelRF; 7937 map[2] = OMX_AUDIO_ChannelCF; 7938 map[3] = OMX_AUDIO_ChannelLR; 7939 map[4] = OMX_AUDIO_ChannelRR; 7940 break; 7941 case 6: 7942 map[0] = OMX_AUDIO_ChannelLF; 7943 map[1] = OMX_AUDIO_ChannelRF; 7944 map[2] = OMX_AUDIO_ChannelCF; 7945 map[3] = OMX_AUDIO_ChannelLFE; 7946 map[4] = OMX_AUDIO_ChannelLR; 7947 map[5] = OMX_AUDIO_ChannelRR; 7948 break; 7949 case 7: 7950 map[0] = OMX_AUDIO_ChannelLF; 7951 map[1] = OMX_AUDIO_ChannelRF; 7952 map[2] = OMX_AUDIO_ChannelCF; 7953 map[3] = OMX_AUDIO_ChannelLFE; 7954 map[4] = OMX_AUDIO_ChannelLR; 7955 map[5] = OMX_AUDIO_ChannelRR; 7956 map[6] = OMX_AUDIO_ChannelCS; 7957 break; 7958 case 8: 7959 map[0] = OMX_AUDIO_ChannelLF; 7960 map[1] = OMX_AUDIO_ChannelRF; 7961 map[2] = OMX_AUDIO_ChannelCF; 7962 map[3] = OMX_AUDIO_ChannelLFE; 7963 map[4] = OMX_AUDIO_ChannelLR; 7964 map[5] = OMX_AUDIO_ChannelRR; 7965 map[6] = OMX_AUDIO_ChannelLS; 7966 map[7] = OMX_AUDIO_ChannelRS; 7967 break; 7968 default: 7969 return -EINVAL; 7970 } 7971 7972 return OK; 7973} 7974 7975} // namespace android 7976