// ACodec.cpp revision db2d2066e6e3f3ce728c2b17ed0923b02834519b
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "omx/OMXUtils.h" 57 58namespace android { 59 60enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62}; 63 64// OMX errors are directly mapped into 
status_t range if 65// there is no corresponding MediaError status code. 66// Use the statusFromOMXError(int32_t omxError) function. 67// 68// Currently this is a direct map. 69// See frameworks/native/include/media/openmax/OMX_Core.h 70// 71// Vendor OMX errors from 0x90000000 - 0x9000FFFF 72// Extension OMX errors from 0x8F000000 - 0x90000000 73// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 74// 75 76// returns true if err is a recognized OMX error code. 77// as OMX error is OMX_S32, this is an int32_t type 78static inline bool isOMXError(int32_t err) { 79 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 80} 81 82// converts an OMX error to a status_t 83static inline status_t statusFromOMXError(int32_t omxError) { 84 switch (omxError) { 85 case OMX_ErrorInvalidComponentName: 86 case OMX_ErrorComponentNotFound: 87 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 88 default: 89 return isOMXError(omxError) ? omxError : 0; // no translation required 90 } 91} 92 93// checks and converts status_t to a non-side-effect status_t 94static inline status_t makeNoSideEffectStatus(status_t err) { 95 switch (err) { 96 // the following errors have side effects and may come 97 // from other code modules. Remap for safety reasons. 
98 case INVALID_OPERATION: 99 case DEAD_OBJECT: 100 return UNKNOWN_ERROR; 101 default: 102 return err; 103 } 104} 105 106struct MessageList : public RefBase { 107 MessageList() { 108 } 109 virtual ~MessageList() { 110 } 111 std::list<sp<AMessage> > &getList() { return mList; } 112private: 113 std::list<sp<AMessage> > mList; 114 115 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 116}; 117 118static sp<DataConverter> getCopyConverter() { 119 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited 120 static sp<DataConverter> sCopyConverter; // zero-inited 121 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); }); 122 return sCopyConverter; 123} 124 125struct CodecObserver : public BnOMXObserver { 126 CodecObserver() {} 127 128 void setNotificationMessage(const sp<AMessage> &msg) { 129 mNotify = msg; 130 } 131 132 // from IOMXObserver 133 virtual void onMessages(const std::list<omx_message> &messages) { 134 if (messages.empty()) { 135 return; 136 } 137 138 sp<AMessage> notify = mNotify->dup(); 139 bool first = true; 140 sp<MessageList> msgList = new MessageList(); 141 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 142 it != messages.cend(); ++it) { 143 const omx_message &omx_msg = *it; 144 if (first) { 145 notify->setInt32("node", omx_msg.node); 146 first = false; 147 } 148 149 sp<AMessage> msg = new AMessage; 150 msg->setInt32("type", omx_msg.type); 151 switch (omx_msg.type) { 152 case omx_message::EVENT: 153 { 154 msg->setInt32("event", omx_msg.u.event_data.event); 155 msg->setInt32("data1", omx_msg.u.event_data.data1); 156 msg->setInt32("data2", omx_msg.u.event_data.data2); 157 break; 158 } 159 160 case omx_message::EMPTY_BUFFER_DONE: 161 { 162 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 163 msg->setInt32("fence_fd", omx_msg.fenceFd); 164 break; 165 } 166 167 case omx_message::FILL_BUFFER_DONE: 168 { 169 msg->setInt32( 170 "buffer", omx_msg.u.extended_buffer_data.buffer); 171 msg->setInt32( 172 "range_offset", 173 
omx_msg.u.extended_buffer_data.range_offset); 174 msg->setInt32( 175 "range_length", 176 omx_msg.u.extended_buffer_data.range_length); 177 msg->setInt32( 178 "flags", 179 omx_msg.u.extended_buffer_data.flags); 180 msg->setInt64( 181 "timestamp", 182 omx_msg.u.extended_buffer_data.timestamp); 183 msg->setInt32( 184 "fence_fd", omx_msg.fenceFd); 185 break; 186 } 187 188 case omx_message::FRAME_RENDERED: 189 { 190 msg->setInt64( 191 "media_time_us", omx_msg.u.render_data.timestamp); 192 msg->setInt64( 193 "system_nano", omx_msg.u.render_data.nanoTime); 194 break; 195 } 196 197 default: 198 ALOGE("Unrecognized message type: %d", omx_msg.type); 199 break; 200 } 201 msgList->getList().push_back(msg); 202 } 203 notify->setObject("messages", msgList); 204 notify->post(); 205 } 206 207protected: 208 virtual ~CodecObserver() {} 209 210private: 211 sp<AMessage> mNotify; 212 213 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); 214}; 215 216//////////////////////////////////////////////////////////////////////////////// 217 218struct ACodec::BaseState : public AState { 219 BaseState(ACodec *codec, const sp<AState> &parentState = NULL); 220 221protected: 222 enum PortMode { 223 KEEP_BUFFERS, 224 RESUBMIT_BUFFERS, 225 FREE_BUFFERS, 226 }; 227 228 ACodec *mCodec; 229 230 virtual PortMode getPortMode(OMX_U32 portIndex); 231 232 virtual bool onMessageReceived(const sp<AMessage> &msg); 233 234 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 235 236 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 237 virtual void onInputBufferFilled(const sp<AMessage> &msg); 238 239 void postFillThisBuffer(BufferInfo *info); 240 241private: 242 // Handles an OMX message. Returns true iff message was handled. 243 bool onOMXMessage(const sp<AMessage> &msg); 244 245 // Handles a list of messages. Returns true iff messages were handled. 
246 bool onOMXMessageList(const sp<AMessage> &msg); 247 248 // returns true iff this message is for this component and the component is alive 249 bool checkOMXMessage(const sp<AMessage> &msg); 250 251 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd); 252 253 bool onOMXFillBufferDone( 254 IOMX::buffer_id bufferID, 255 size_t rangeOffset, size_t rangeLength, 256 OMX_U32 flags, 257 int64_t timeUs, 258 int fenceFd); 259 260 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 261 262 void getMoreInputDataIfPossible(); 263 264 DISALLOW_EVIL_CONSTRUCTORS(BaseState); 265}; 266 267//////////////////////////////////////////////////////////////////////////////// 268 269struct ACodec::DeathNotifier : public IBinder::DeathRecipient { 270 DeathNotifier(const sp<AMessage> ¬ify) 271 : mNotify(notify) { 272 } 273 274 virtual void binderDied(const wp<IBinder> &) { 275 mNotify->post(); 276 } 277 278protected: 279 virtual ~DeathNotifier() {} 280 281private: 282 sp<AMessage> mNotify; 283 284 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); 285}; 286 287struct ACodec::UninitializedState : public ACodec::BaseState { 288 UninitializedState(ACodec *codec); 289 290protected: 291 virtual bool onMessageReceived(const sp<AMessage> &msg); 292 virtual void stateEntered(); 293 294private: 295 void onSetup(const sp<AMessage> &msg); 296 bool onAllocateComponent(const sp<AMessage> &msg); 297 298 sp<DeathNotifier> mDeathNotifier; 299 300 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); 301}; 302 303//////////////////////////////////////////////////////////////////////////////// 304 305struct ACodec::LoadedState : public ACodec::BaseState { 306 LoadedState(ACodec *codec); 307 308protected: 309 virtual bool onMessageReceived(const sp<AMessage> &msg); 310 virtual void stateEntered(); 311 312private: 313 friend struct ACodec::UninitializedState; 314 315 bool onConfigureComponent(const sp<AMessage> &msg); 316 void onCreateInputSurface(const sp<AMessage> &msg); 317 void 
onSetInputSurface(const sp<AMessage> &msg); 318 void onStart(); 319 void onShutdown(bool keepComponentAllocated); 320 321 status_t setupInputSurface(); 322 323 DISALLOW_EVIL_CONSTRUCTORS(LoadedState); 324}; 325 326//////////////////////////////////////////////////////////////////////////////// 327 328struct ACodec::LoadedToIdleState : public ACodec::BaseState { 329 LoadedToIdleState(ACodec *codec); 330 331protected: 332 virtual bool onMessageReceived(const sp<AMessage> &msg); 333 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 334 virtual void stateEntered(); 335 336private: 337 status_t allocateBuffers(); 338 339 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); 340}; 341 342//////////////////////////////////////////////////////////////////////////////// 343 344struct ACodec::IdleToExecutingState : public ACodec::BaseState { 345 IdleToExecutingState(ACodec *codec); 346 347protected: 348 virtual bool onMessageReceived(const sp<AMessage> &msg); 349 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 350 virtual void stateEntered(); 351 352private: 353 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); 354}; 355 356//////////////////////////////////////////////////////////////////////////////// 357 358struct ACodec::ExecutingState : public ACodec::BaseState { 359 ExecutingState(ACodec *codec); 360 361 void submitRegularOutputBuffers(); 362 void submitOutputMetaBuffers(); 363 void submitOutputBuffers(); 364 365 // Submit output buffers to the decoder, submit input buffers to client 366 // to fill with data. 367 void resume(); 368 369 // Returns true iff input and output buffers are in play. 
370 bool active() const { return mActive; } 371 372protected: 373 virtual PortMode getPortMode(OMX_U32 portIndex); 374 virtual bool onMessageReceived(const sp<AMessage> &msg); 375 virtual void stateEntered(); 376 377 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 378 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 379 380private: 381 bool mActive; 382 383 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); 384}; 385 386//////////////////////////////////////////////////////////////////////////////// 387 388struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { 389 OutputPortSettingsChangedState(ACodec *codec); 390 391protected: 392 virtual PortMode getPortMode(OMX_U32 portIndex); 393 virtual bool onMessageReceived(const sp<AMessage> &msg); 394 virtual void stateEntered(); 395 396 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 397 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 398 399private: 400 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); 401}; 402 403//////////////////////////////////////////////////////////////////////////////// 404 405struct ACodec::ExecutingToIdleState : public ACodec::BaseState { 406 ExecutingToIdleState(ACodec *codec); 407 408protected: 409 virtual bool onMessageReceived(const sp<AMessage> &msg); 410 virtual void stateEntered(); 411 412 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 413 414 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 415 virtual void onInputBufferFilled(const sp<AMessage> &msg); 416 417private: 418 void changeStateIfWeOwnAllBuffers(); 419 420 bool mComponentNowIdle; 421 422 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); 423}; 424 425//////////////////////////////////////////////////////////////////////////////// 426 427struct ACodec::IdleToLoadedState : public ACodec::BaseState { 428 IdleToLoadedState(ACodec *codec); 429 430protected: 431 
virtual bool onMessageReceived(const sp<AMessage> &msg); 432 virtual void stateEntered(); 433 434 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 435 436private: 437 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); 438}; 439 440//////////////////////////////////////////////////////////////////////////////// 441 442struct ACodec::FlushingState : public ACodec::BaseState { 443 FlushingState(ACodec *codec); 444 445protected: 446 virtual bool onMessageReceived(const sp<AMessage> &msg); 447 virtual void stateEntered(); 448 449 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 450 451 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 452 virtual void onInputBufferFilled(const sp<AMessage> &msg); 453 454private: 455 bool mFlushComplete[2]; 456 457 void changeStateIfWeOwnAllBuffers(); 458 459 DISALLOW_EVIL_CONSTRUCTORS(FlushingState); 460}; 461 462//////////////////////////////////////////////////////////////////////////////// 463 464void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) { 465 if (mFenceFd >= 0) { 466 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", 467 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 468 } 469 mFenceFd = fenceFd; 470 mIsReadFence = false; 471} 472 473void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { 474 if (mFenceFd >= 0) { 475 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", 476 mIsReadFence ? 
"read" : "write", mFenceFd, fenceFd, dbg); 477 } 478 mFenceFd = fenceFd; 479 mIsReadFence = true; 480} 481 482void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 483 if (mFenceFd >= 0 && mIsReadFence) { 484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 485 } 486} 487 488void ACodec::BufferInfo::checkReadFence(const char *dbg) { 489 if (mFenceFd >= 0 && !mIsReadFence) { 490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 491 } 492} 493 494//////////////////////////////////////////////////////////////////////////////// 495 496ACodec::ACodec() 497 : mQuirks(0), 498 mNode(0), 499 mUsingNativeWindow(false), 500 mNativeWindowUsageBits(0), 501 mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN), 502 mIsVideo(false), 503 mIsEncoder(false), 504 mFatalError(false), 505 mShutdownInProgress(false), 506 mExplicitShutdown(false), 507 mIsLegacyVP9Decoder(false), 508 mEncoderDelay(0), 509 mEncoderPadding(0), 510 mRotationDegrees(0), 511 mChannelMaskPresent(false), 512 mChannelMask(0), 513 mDequeueCounter(0), 514 mInputMetadataType(kMetadataBufferTypeInvalid), 515 mOutputMetadataType(kMetadataBufferTypeInvalid), 516 mLegacyAdaptiveExperiment(false), 517 mMetadataBuffersToSubmit(0), 518 mNumUndequeuedBuffers(0), 519 mRepeatFrameDelayUs(-1ll), 520 mMaxPtsGapUs(-1ll), 521 mMaxFps(-1), 522 mTimePerFrameUs(-1ll), 523 mTimePerCaptureUs(-1ll), 524 mCreateInputBuffersSuspended(false), 525 mTunneled(false), 526 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0), 527 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) { 528 mUninitializedState = new UninitializedState(this); 529 mLoadedState = new LoadedState(this); 530 mLoadedToIdleState = new LoadedToIdleState(this); 531 mIdleToExecutingState = new IdleToExecutingState(this); 532 mExecutingState = new ExecutingState(this); 533 534 mOutputPortSettingsChangedState = 535 new OutputPortSettingsChangedState(this); 536 537 mExecutingToIdleState = new ExecutingToIdleState(this); 538 mIdleToLoadedState = new 
IdleToLoadedState(this); 539 mFlushingState = new FlushingState(this); 540 541 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 542 mInputEOSResult = OK; 543 544 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 545 546 changeState(mUninitializedState); 547} 548 549ACodec::~ACodec() { 550} 551 552void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 553 mNotify = msg; 554} 555 556void ACodec::initiateSetup(const sp<AMessage> &msg) { 557 msg->setWhat(kWhatSetup); 558 msg->setTarget(this); 559 msg->post(); 560} 561 562void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 563 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 564 msg->setMessage("params", params); 565 msg->post(); 566} 567 568void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 569 msg->setWhat(kWhatAllocateComponent); 570 msg->setTarget(this); 571 msg->post(); 572} 573 574void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 575 msg->setWhat(kWhatConfigureComponent); 576 msg->setTarget(this); 577 msg->post(); 578} 579 580status_t ACodec::setSurface(const sp<Surface> &surface) { 581 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 582 msg->setObject("surface", surface); 583 584 sp<AMessage> response; 585 status_t err = msg->postAndAwaitResponse(&response); 586 587 if (err == OK) { 588 (void)response->findInt32("err", &err); 589 } 590 return err; 591} 592 593void ACodec::initiateCreateInputSurface() { 594 (new AMessage(kWhatCreateInputSurface, this))->post(); 595} 596 597void ACodec::initiateSetInputSurface( 598 const sp<PersistentSurface> &surface) { 599 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 600 msg->setObject("input-surface", surface); 601 msg->post(); 602} 603 604void ACodec::signalEndOfInputStream() { 605 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 606} 607 608void ACodec::initiateStart() { 609 (new AMessage(kWhatStart, this))->post(); 610} 611 612void ACodec::signalFlush() { 
613 ALOGV("[%s] signalFlush", mComponentName.c_str()); 614 (new AMessage(kWhatFlush, this))->post(); 615} 616 617void ACodec::signalResume() { 618 (new AMessage(kWhatResume, this))->post(); 619} 620 621void ACodec::initiateShutdown(bool keepComponentAllocated) { 622 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 623 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 624 msg->post(); 625 if (!keepComponentAllocated) { 626 // ensure shutdown completes in 3 seconds 627 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 628 } 629} 630 631void ACodec::signalRequestIDRFrame() { 632 (new AMessage(kWhatRequestIDRFrame, this))->post(); 633} 634 635// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 636// Some codecs may return input buffers before having them processed. 637// This causes a halt if we already signaled an EOS on the input 638// port. For now keep submitting an output buffer if there was an 639// EOS on the input port, but not yet on the output port. 
640void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 641 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 642 mMetadataBuffersToSubmit > 0) { 643 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 644 } 645} 646 647status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 648 // allow keeping unset surface 649 if (surface == NULL) { 650 if (mNativeWindow != NULL) { 651 ALOGW("cannot unset a surface"); 652 return INVALID_OPERATION; 653 } 654 return OK; 655 } 656 657 // cannot switch from bytebuffers to surface 658 if (mNativeWindow == NULL) { 659 ALOGW("component was not configured with a surface"); 660 return INVALID_OPERATION; 661 } 662 663 ANativeWindow *nativeWindow = surface.get(); 664 // if we have not yet started the codec, we can simply set the native window 665 if (mBuffers[kPortIndexInput].size() == 0) { 666 mNativeWindow = surface; 667 return OK; 668 } 669 670 // we do not support changing a tunneled surface after start 671 if (mTunneled) { 672 ALOGW("cannot change tunneled surface"); 673 return INVALID_OPERATION; 674 } 675 676 int usageBits = 0; 677 // no need to reconnect as we will not dequeue all buffers 678 status_t err = setupNativeWindowSizeFormatAndUsage( 679 nativeWindow, &usageBits, 680 !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */); 681 if (err != OK) { 682 return err; 683 } 684 685 int ignoredFlags = kVideoGrallocUsage; 686 // New output surface is not allowed to add new usage flag except ignored ones. 687 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 688 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 689 return BAD_VALUE; 690 } 691 692 // get min undequeued count. We cannot switch to a surface that has a higher 693 // undequeued count than we allocated. 
694 int minUndequeuedBuffers = 0; 695 err = nativeWindow->query( 696 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 697 &minUndequeuedBuffers); 698 if (err != 0) { 699 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 700 strerror(-err), -err); 701 return err; 702 } 703 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 704 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 705 minUndequeuedBuffers, mNumUndequeuedBuffers); 706 return BAD_VALUE; 707 } 708 709 // we cannot change the number of output buffers while OMX is running 710 // set up surface to the same count 711 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 712 ALOGV("setting up surface for %zu buffers", buffers.size()); 713 714 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 715 if (err != 0) { 716 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 717 -err); 718 return err; 719 } 720 721 // need to enable allocation when attaching 722 surface->getIGraphicBufferProducer()->allowAllocation(true); 723 724 // for meta data mode, we move dequeud buffers to the new surface. 
725 // for non-meta mode, we must move all registered buffers 726 for (size_t i = 0; i < buffers.size(); ++i) { 727 const BufferInfo &info = buffers[i]; 728 // skip undequeued buffers for meta data mode 729 if (storingMetadataInDecodedBuffers() 730 && !mLegacyAdaptiveExperiment 731 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 732 ALOGV("skipping buffer"); 733 continue; 734 } 735 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 736 737 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 738 if (err != OK) { 739 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 740 info.mGraphicBuffer->getNativeBuffer(), 741 strerror(-err), -err); 742 return err; 743 } 744 } 745 746 // cancel undequeued buffers to new surface 747 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 748 for (size_t i = 0; i < buffers.size(); ++i) { 749 BufferInfo &info = buffers.editItemAt(i); 750 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 751 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 752 err = nativeWindow->cancelBuffer( 753 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 754 info.mFenceFd = -1; 755 if (err != OK) { 756 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 757 info.mGraphicBuffer->getNativeBuffer(), 758 strerror(-err), -err); 759 return err; 760 } 761 } 762 } 763 // disallow further allocation 764 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 765 } 766 767 // push blank buffers to previous window if requested 768 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 769 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 770 } 771 772 mNativeWindow = nativeWindow; 773 mNativeWindowUsageBits = usageBits; 774 return OK; 775} 776 777status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 778 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 779 780 CHECK(mDealer[portIndex] == 
NULL); 781 CHECK(mBuffers[portIndex].isEmpty()); 782 783 status_t err; 784 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { 785 if (storingMetadataInDecodedBuffers()) { 786 err = allocateOutputMetadataBuffers(); 787 } else { 788 err = allocateOutputBuffersFromNativeWindow(); 789 } 790 } else { 791 OMX_PARAM_PORTDEFINITIONTYPE def; 792 InitOMXParams(&def); 793 def.nPortIndex = portIndex; 794 795 err = mOMX->getParameter( 796 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 797 798 if (err == OK) { 799 MetadataBufferType type = 800 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 801 size_t bufSize = def.nBufferSize; 802 if (type == kMetadataBufferTypeANWBuffer) { 803 bufSize = sizeof(VideoNativeMetadata); 804 } else if (type == kMetadataBufferTypeNativeHandleSource) { 805 bufSize = sizeof(VideoNativeHandleMetadata); 806 } 807 808 // If using gralloc or native source input metadata buffers, allocate largest 809 // metadata size as we prefer to generate native source metadata, but component 810 // may require gralloc source. For camera source, allocate at least enough 811 // size for native metadata buffers. 
812 size_t allottedSize = bufSize; 813 if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) { 814 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 815 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 816 bufSize = max(bufSize, sizeof(VideoNativeMetadata)); 817 } 818 819 size_t conversionBufferSize = 0; 820 821 sp<DataConverter> converter = mConverter[portIndex]; 822 if (converter != NULL) { 823 // here we assume sane conversions of max 4:1, so result fits in int32 824 if (portIndex == kPortIndexInput) { 825 conversionBufferSize = converter->sourceSize(bufSize); 826 } else { 827 conversionBufferSize = converter->targetSize(bufSize); 828 } 829 } 830 831 size_t alignment = MemoryDealer::getAllocationAlignment(); 832 833 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port", 834 mComponentName.c_str(), 835 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 836 portIndex == kPortIndexInput ? 
"input" : "output"); 837 838 // verify buffer sizes to avoid overflow in align() 839 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { 840 ALOGE("b/22885421"); 841 return NO_MEMORY; 842 } 843 844 // don't modify bufSize as OMX may not expect it to increase after negotiation 845 size_t alignedSize = align(bufSize, alignment); 846 size_t alignedConvSize = align(conversionBufferSize, alignment); 847 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { 848 ALOGE("b/22885421"); 849 return NO_MEMORY; 850 } 851 852 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); 853 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 854 855 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 856 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 857 if (mem == NULL || mem->pointer() == NULL) { 858 return NO_MEMORY; 859 } 860 861 BufferInfo info; 862 info.mStatus = BufferInfo::OWNED_BY_US; 863 info.mFenceFd = -1; 864 info.mRenderInfo = NULL; 865 info.mNativeHandle = NULL; 866 867 uint32_t requiresAllocateBufferBit = 868 (portIndex == kPortIndexInput) 869 ? kRequiresAllocateBufferOnInputPorts 870 : kRequiresAllocateBufferOnOutputPorts; 871 872 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 873 mem.clear(); 874 875 void *ptr = NULL; 876 sp<NativeHandle> native_handle; 877 err = mOMX->allocateSecureBuffer( 878 mNode, portIndex, bufSize, &info.mBufferID, 879 &ptr, &native_handle); 880 881 // TRICKY: this representation is unorthodox, but ACodec requires 882 // an ABuffer with a proper size to validate range offsets and lengths. 883 // Since mData is never referenced for secure input, it is used to store 884 // either the pointer to the secure buffer, or the opaque handle as on 885 // some devices ptr is actually an opaque handle, not a pointer. 
886 887 // TRICKY2: use native handle as the base of the ABuffer if received one, 888 // because Widevine source only receives these base addresses. 889 const native_handle_t *native_handle_ptr = 890 native_handle == NULL ? NULL : native_handle->handle(); 891 info.mData = new ABuffer( 892 ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize); 893 info.mNativeHandle = native_handle; 894 info.mCodecData = info.mData; 895 } else if (mQuirks & requiresAllocateBufferBit) { 896 err = mOMX->allocateBufferWithBackup( 897 mNode, portIndex, mem, &info.mBufferID, allottedSize); 898 } else { 899 err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize); 900 } 901 902 if (mem != NULL) { 903 info.mCodecData = new ABuffer(mem->pointer(), bufSize); 904 info.mCodecRef = mem; 905 906 if (type == kMetadataBufferTypeANWBuffer) { 907 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 908 } 909 910 // if we require conversion, allocate conversion buffer for client use; 911 // otherwise, reuse codec buffer 912 if (mConverter[portIndex] != NULL) { 913 CHECK_GT(conversionBufferSize, (size_t)0); 914 mem = mDealer[portIndex]->allocate(conversionBufferSize); 915 if (mem == NULL|| mem->pointer() == NULL) { 916 return NO_MEMORY; 917 } 918 info.mData = new ABuffer(mem->pointer(), conversionBufferSize); 919 info.mMemRef = mem; 920 } else { 921 info.mData = info.mCodecData; 922 info.mMemRef = info.mCodecRef; 923 } 924 } 925 926 mBuffers[portIndex].push(info); 927 } 928 } 929 } 930 931 if (err != OK) { 932 return err; 933 } 934 935 sp<AMessage> notify = mNotify->dup(); 936 notify->setInt32("what", CodecBase::kWhatBuffersAllocated); 937 938 notify->setInt32("portIndex", portIndex); 939 940 sp<PortDescription> desc = new PortDescription; 941 942 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 943 const BufferInfo &info = mBuffers[portIndex][i]; 944 desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef); 945 } 946 947 
notify->setObject("portDesc", desc); 948 notify->post(); 949 950 return OK; 951} 952 953status_t ACodec::setupNativeWindowSizeFormatAndUsage( 954 ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */, 955 bool reconnect) { 956 OMX_PARAM_PORTDEFINITIONTYPE def; 957 InitOMXParams(&def); 958 def.nPortIndex = kPortIndexOutput; 959 960 status_t err = mOMX->getParameter( 961 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 962 963 if (err != OK) { 964 return err; 965 } 966 967 OMX_U32 usage = 0; 968 err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage); 969 if (err != 0) { 970 ALOGW("querying usage flags from OMX IL component failed: %d", err); 971 // XXX: Currently this error is logged, but not fatal. 972 usage = 0; 973 } 974 int omxUsage = usage; 975 976 if (mFlags & kFlagIsGrallocUsageProtected) { 977 usage |= GRALLOC_USAGE_PROTECTED; 978 } 979 980 usage |= kVideoGrallocUsage; 981 *finalUsage = usage; 982 983 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 984 mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN; 985 986 ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage); 987 return setNativeWindowSizeFormatAndUsage( 988 nativeWindow, 989 def.format.video.nFrameWidth, 990 def.format.video.nFrameHeight, 991 def.format.video.eColorFormat, 992 mRotationDegrees, 993 usage, 994 reconnect); 995} 996 997status_t ACodec::configureOutputBuffersFromNativeWindow( 998 OMX_U32 *bufferCount, OMX_U32 *bufferSize, 999 OMX_U32 *minUndequeuedBuffers, bool preregister) { 1000 1001 OMX_PARAM_PORTDEFINITIONTYPE def; 1002 InitOMXParams(&def); 1003 def.nPortIndex = kPortIndexOutput; 1004 1005 status_t err = mOMX->getParameter( 1006 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1007 1008 if (err == OK) { 1009 err = setupNativeWindowSizeFormatAndUsage( 1010 mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */); 1011 } 1012 if (err != OK) { 1013 mNativeWindowUsageBits = 0; 1014 return err; 
// Negotiates the output buffer count and size between the OMX component and
// the native window.  Reports the agreed values through |bufferCount|,
// |bufferSize| and |minUndequeuedBuffers|; |preregister| is forwarded to
// setupNativeWindowSizeFormatAndUsage() as its |reconnect| flag.  For
// tunneled playback, buffer allocation is skipped and all counts are zeroed.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // Each failed attempt retries with one fewer extra buffer until 0.
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            // The extras we managed to get count toward the undequeued pool.
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}
// Allocates output buffers backed by the native window: negotiates counts via
// configureOutputBuffersFromNativeWindow(), dequeues every buffer and
// registers it with the OMX component, then cancels the required minimum of
// undequeued buffers back to the window.  On a mid-loop failure, every buffer
// we still own is canceled back before returning the first error.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    // Temporarily allow the producer to allocate so the dequeue loop below can
    // actually create the buffers; re-disabled at the end of this function.
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mCodecData = info.mData;
        info.mGraphicBuffer = graphicBuffer;
        // Pushed before registration so the cleanup loop below can see it
        // even if useGraphicBuffer fails.
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // Preserve the first error encountered.
            if (err == 0) {
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}
// Allocates small metadata buffers (VideoNativeMetadata or
// VideoGrallocMetadata, depending on mOutputMetadataType) for the output port
// instead of full graphic buffers.  Under the legacy adaptive experiment the
// window's graphic buffers are additionally preallocated and preregistered by
// dequeuing and then canceling them all.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers,
            mLegacyAdaptiveExperiment /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    // One shared-memory slab holds all metadata records, each aligned to the
    // dealer's allocation alignment.
    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // Mark "no fence" in the metadata record itself.
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new ABuffer(mem->pointer(), mem->size());
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        // we use useBuffer for metadata regardless of quirks
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMX->updateGraphicBufferInMeta(
                    mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        // Hand everything we own back to the window; keep the first error.
        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                if (err == OK) {
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}
1239 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1240 break; 1241 } 1242 1243 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1244 mOMX->updateGraphicBufferInMeta( 1245 mNode, kPortIndexOutput, graphicBuffer, info->mBufferID); 1246 info->mStatus = BufferInfo::OWNED_BY_US; 1247 info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy"); 1248 info->mGraphicBuffer = graphicBuffer; 1249 } 1250 1251 for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) { 1252 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1253 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1254 status_t error = cancelBufferToNativeWindow(info); 1255 if (err == OK) { 1256 err = error; 1257 } 1258 } 1259 } 1260 1261 static_cast<Surface*>(mNativeWindow.get()) 1262 ->getIGraphicBufferProducer()->allowAllocation(false); 1263 } 1264 1265 mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers; 1266 return err; 1267} 1268 1269status_t ACodec::submitOutputMetadataBuffer() { 1270 CHECK(storingMetadataInDecodedBuffers()); 1271 if (mMetadataBuffersToSubmit == 0) 1272 return OK; 1273 1274 BufferInfo *info = dequeueBufferFromNativeWindow(); 1275 if (info == NULL) { 1276 return ERROR_IO; 1277 } 1278 1279 ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p", 1280 mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get()); 1281 1282 --mMetadataBuffersToSubmit; 1283 info->checkWriteFence("submitOutputMetadataBuffer"); 1284 status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd); 1285 info->mFenceFd = -1; 1286 if (err == OK) { 1287 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 1288 } 1289 1290 return err; 1291} 1292 1293status_t ACodec::waitForFence(int fd, const char *dbg ) { 1294 status_t res = OK; 1295 if (fd >= 0) { 1296 sp<Fence> fence = new Fence(fd); 1297 res = fence->wait(IOMX::kFenceTimeoutMs); 1298 ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg); 1299 } 1300 return res; 1301} 
// static
// Maps a BufferInfo::Status value to a short human-readable tag for logging.
const char *ACodec::_asString(BufferInfo::Status s) {
    switch (s) {
        case BufferInfo::OWNED_BY_US: return "OUR";
        case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
        case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
        case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
        case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
        default: return "?";
    }
}

// Logs (at INFO) one line per buffer slot on |portIndex|: buffer id, graphic
// buffer / native buffer pointers, ownership state and dequeue counter.
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                i, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
    }
}

// Returns an OWNED_BY_US output buffer to the native window via cancelBuffer,
// passing along any pending write fence.  The slot is marked
// OWNED_BY_NATIVE_WINDOW even when cancelBuffer itself fails.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    // The fence fd was passed to cancelBuffer; we no longer track it here.
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Associates fresh render-tracking state with a just-dequeued buffer and
// immediately reports any frames whose fences have already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                // the tracker keys off the slot index within mBuffers
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Frame-rendered callback; dumps the tracker's render queue when it does not
// recognize the rendered frame.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}
// Collects frames whose fences have signaled (up to |until|, optionally
// dropping incomplete ones), unlinks their per-buffer render info, and posts
// a kWhatOutputFramesRendered message when there is anything to report.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}

// Dequeues a buffer from the native window and returns the matching
// BufferInfo slot.  Stale buffers (known but not currently
// OWNED_BY_NATIVE_WINDOW, or unknown while in non-meta/legacy mode) are
// discarded and the dequeue retried.  In metadata mode an unknown buffer
// replaces the least-recently-dequeued slot.  Returns NULL in tunneled mode,
// after a fatal error, or when dequeueBuffer itself fails.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
              " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                    info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used.  We can simply ignore this as we have already dequeued
                // this buffer properly.  NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible to dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    // Point the slot's metadata record at the replacement graphic buffer.
    mOMX->updateGraphicBufferInMeta(
            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
            oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer slot on |portIndex| (highest slot first, so removeAt()
// inside freeBuffer() does not shift pending indices) and clears the port's
// memory dealer.  The first error encountered is returned.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}
// Frees every output buffer that is not currently held by the OMX component
// or a downstream consumer.  Iterates highest slot first so removal does not
// shift yet-to-be-visited indices; the first error encountered is returned.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t err = OK;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            status_t err2 = freeBuffer(kPortIndexOutput, i);
            if (err == OK) {
                err = err2;
            }
        }
    }

    return err;
}

// Releases buffer slot |i| on |portIndex|: cancels an owned output buffer
// back to the native window, frees the buffer in the OMX component, closes
// any pending fence fd, unlinks render tracking, and removes the slot.  The
// slot is removed even when mOMX->freeBuffer fails.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    // Close any fence fd we still hold for this slot.
    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMX->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}
&mBuffers[portIndex].editItemAt(i); 1595 1596 if (info->mBufferID == bufferID) { 1597 if (index != NULL) { 1598 *index = i; 1599 } 1600 return info; 1601 } 1602 } 1603 1604 ALOGE("Could not find buffer with ID %u", bufferID); 1605 return NULL; 1606} 1607 1608status_t ACodec::setComponentRole( 1609 bool isEncoder, const char *mime) { 1610 const char *role = getComponentRole(isEncoder, mime); 1611 if (role == NULL) { 1612 return BAD_VALUE; 1613 } 1614 status_t err = setComponentRole(mOMX, mNode, role); 1615 if (err != OK) { 1616 ALOGW("[%s] Failed to set standard component role '%s'.", 1617 mComponentName.c_str(), role); 1618 } 1619 return err; 1620} 1621 1622//static 1623const char *ACodec::getComponentRole( 1624 bool isEncoder, const char *mime) { 1625 struct MimeToRole { 1626 const char *mime; 1627 const char *decoderRole; 1628 const char *encoderRole; 1629 }; 1630 1631 static const MimeToRole kMimeToRole[] = { 1632 { MEDIA_MIMETYPE_AUDIO_MPEG, 1633 "audio_decoder.mp3", "audio_encoder.mp3" }, 1634 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I, 1635 "audio_decoder.mp1", "audio_encoder.mp1" }, 1636 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, 1637 "audio_decoder.mp2", "audio_encoder.mp2" }, 1638 { MEDIA_MIMETYPE_AUDIO_AMR_NB, 1639 "audio_decoder.amrnb", "audio_encoder.amrnb" }, 1640 { MEDIA_MIMETYPE_AUDIO_AMR_WB, 1641 "audio_decoder.amrwb", "audio_encoder.amrwb" }, 1642 { MEDIA_MIMETYPE_AUDIO_AAC, 1643 "audio_decoder.aac", "audio_encoder.aac" }, 1644 { MEDIA_MIMETYPE_AUDIO_VORBIS, 1645 "audio_decoder.vorbis", "audio_encoder.vorbis" }, 1646 { MEDIA_MIMETYPE_AUDIO_OPUS, 1647 "audio_decoder.opus", "audio_encoder.opus" }, 1648 { MEDIA_MIMETYPE_AUDIO_G711_MLAW, 1649 "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" }, 1650 { MEDIA_MIMETYPE_AUDIO_G711_ALAW, 1651 "audio_decoder.g711alaw", "audio_encoder.g711alaw" }, 1652 { MEDIA_MIMETYPE_VIDEO_AVC, 1653 "video_decoder.avc", "video_encoder.avc" }, 1654 { MEDIA_MIMETYPE_VIDEO_HEVC, 1655 "video_decoder.hevc", "video_encoder.hevc" }, 1656 { 
//static
// Maps a mime type to the standard OMX component role string, selecting the
// decoder or encoder role per |isEncoder|.  Returns NULL for mime types with
// no entry in the table.
const char *ACodec::getComponentRole(
        bool isEncoder, const char *mime) {
    struct MimeToRole {
        const char *mime;
        const char *decoderRole;
        const char *encoderRole;
    };

    static const MimeToRole kMimeToRole[] = {
        { MEDIA_MIMETYPE_AUDIO_MPEG,
            "audio_decoder.mp3", "audio_encoder.mp3" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
            "audio_decoder.mp1", "audio_encoder.mp1" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
            "audio_decoder.mp2", "audio_encoder.mp2" },
        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
            "audio_decoder.amrnb", "audio_encoder.amrnb" },
        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
            "audio_decoder.amrwb", "audio_encoder.amrwb" },
        { MEDIA_MIMETYPE_AUDIO_AAC,
            "audio_decoder.aac", "audio_encoder.aac" },
        { MEDIA_MIMETYPE_AUDIO_VORBIS,
            "audio_decoder.vorbis", "audio_encoder.vorbis" },
        { MEDIA_MIMETYPE_AUDIO_OPUS,
            "audio_decoder.opus", "audio_encoder.opus" },
        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
        { MEDIA_MIMETYPE_VIDEO_AVC,
            "video_decoder.avc", "video_encoder.avc" },
        { MEDIA_MIMETYPE_VIDEO_HEVC,
            "video_decoder.hevc", "video_encoder.hevc" },
        { MEDIA_MIMETYPE_VIDEO_MPEG4,
            "video_decoder.mpeg4", "video_encoder.mpeg4" },
        { MEDIA_MIMETYPE_VIDEO_H263,
            "video_decoder.h263", "video_encoder.h263" },
        { MEDIA_MIMETYPE_VIDEO_VP8,
            "video_decoder.vp8", "video_encoder.vp8" },
        { MEDIA_MIMETYPE_VIDEO_VP9,
            "video_decoder.vp9", "video_encoder.vp9" },
        { MEDIA_MIMETYPE_AUDIO_RAW,
            "audio_decoder.raw", "audio_encoder.raw" },
        { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
            "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
        { MEDIA_MIMETYPE_AUDIO_FLAC,
            "audio_decoder.flac", "audio_encoder.flac" },
        { MEDIA_MIMETYPE_AUDIO_MSGSM,
            "audio_decoder.gsm", "audio_encoder.gsm" },
        { MEDIA_MIMETYPE_VIDEO_MPEG2,
            "video_decoder.mpeg2", "video_encoder.mpeg2" },
        { MEDIA_MIMETYPE_AUDIO_AC3,
            "audio_decoder.ac3", "audio_encoder.ac3" },
        { MEDIA_MIMETYPE_AUDIO_EAC3,
            "audio_decoder.eac3", "audio_encoder.eac3" },
    };

    static const size_t kNumMimeToRole =
        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);

    // Case-insensitive linear search over the table.
    size_t i;
    for (i = 0; i < kNumMimeToRole; ++i) {
        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
            break;
        }
    }

    if (i == kNumMimeToRole) {
        return NULL;
    }

    return isEncoder ? kMimeToRole[i].encoderRole
                  : kMimeToRole[i].decoderRole;
}

//static
// Copies |role| into an OMX_PARAM_COMPONENTROLETYPE (truncated to
// OMX_MAX_STRINGNAME_SIZE - 1 and always NUL-terminated) and applies it to
// |node| via OMX_IndexParamStandardComponentRole.
status_t ACodec::setComponentRole(
        const sp<IOMX> &omx, IOMX::node_id node, const char *role) {
    OMX_PARAM_COMPONENTROLETYPE roleParams;
    InitOMXParams(&roleParams);

    strncpy((char *)roleParams.cRole,
            role, OMX_MAX_STRINGNAME_SIZE - 1);

    // strncpy does not NUL-terminate on truncation; force it.
    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';

    return omx->setParameter(
            node, OMX_IndexParamStandardComponentRole,
            &roleParams, sizeof(roleParams));
}
1747 outputFormat->setInt32("bitrate", bitRate); 1748 outputFormat->setInt32("max-bitrate", bitRate); 1749 } 1750 1751 int32_t storeMeta; 1752 if (encoder 1753 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1754 && storeMeta != kMetadataBufferTypeInvalid) { 1755 mInputMetadataType = (MetadataBufferType)storeMeta; 1756 err = mOMX->storeMetaDataInBuffers( 1757 mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1758 if (err != OK) { 1759 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1760 mComponentName.c_str(), err); 1761 1762 return err; 1763 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1764 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1765 // IOMX translates ANWBuffers to gralloc source already. 1766 mInputMetadataType = (MetadataBufferType)storeMeta; 1767 } 1768 1769 uint32_t usageBits; 1770 if (mOMX->getParameter( 1771 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1772 &usageBits, sizeof(usageBits)) == OK) { 1773 inputFormat->setInt32( 1774 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1775 } 1776 } 1777 1778 int32_t prependSPSPPS = 0; 1779 if (encoder 1780 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1781 && prependSPSPPS != 0) { 1782 OMX_INDEXTYPE index; 1783 err = mOMX->getExtensionIndex( 1784 mNode, 1785 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1786 &index); 1787 1788 if (err == OK) { 1789 PrependSPSPPSToIDRFramesParams params; 1790 InitOMXParams(¶ms); 1791 params.bEnable = OMX_TRUE; 1792 1793 err = mOMX->setParameter( 1794 mNode, index, ¶ms, sizeof(params)); 1795 } 1796 1797 if (err != OK) { 1798 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1799 "IDR frames. 
(err %d)", err); 1800 1801 return err; 1802 } 1803 } 1804 1805 // Only enable metadata mode on encoder output if encoder can prepend 1806 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1807 // opaque handle, to which we don't have access. 1808 int32_t video = !strncasecmp(mime, "video/", 6); 1809 mIsVideo = video; 1810 if (encoder && video) { 1811 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1812 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1813 && storeMeta != 0); 1814 1815 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1816 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1817 if (err != OK) { 1818 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1819 mComponentName.c_str(), err); 1820 } 1821 1822 if (!msg->findInt64( 1823 "repeat-previous-frame-after", 1824 &mRepeatFrameDelayUs)) { 1825 mRepeatFrameDelayUs = -1ll; 1826 } 1827 1828 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1829 mMaxPtsGapUs = -1ll; 1830 } 1831 1832 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1833 mMaxFps = -1; 1834 } 1835 1836 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1837 mTimePerCaptureUs = -1ll; 1838 } 1839 1840 if (!msg->findInt32( 1841 "create-input-buffers-suspended", 1842 (int32_t*)&mCreateInputBuffersSuspended)) { 1843 mCreateInputBuffersSuspended = false; 1844 } 1845 } 1846 1847 // NOTE: we only use native window for video decoders 1848 sp<RefBase> obj; 1849 bool haveNativeWindow = msg->findObject("native-window", &obj) 1850 && obj != NULL && video && !encoder; 1851 mUsingNativeWindow = haveNativeWindow; 1852 mLegacyAdaptiveExperiment = false; 1853 if (video && !encoder) { 1854 inputFormat->setInt32("adaptive-playback", false); 1855 1856 int32_t usageProtected; 1857 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1858 if (!haveNativeWindow) { 1859 ALOGE("protected output buffers must be sent to an 
ANativeWindow"); 1860 return PERMISSION_DENIED; 1861 } 1862 mFlags |= kFlagIsGrallocUsageProtected; 1863 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1864 } 1865 1866 if (mFlags & kFlagIsSecure) { 1867 // use native_handles for secure input buffers 1868 err = mOMX->enableNativeBuffers( 1869 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1870 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1871 err = OK; // ignore error for now 1872 } 1873 } 1874 if (haveNativeWindow) { 1875 sp<ANativeWindow> nativeWindow = 1876 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1877 1878 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1879 int32_t autoFrc; 1880 if (msg->findInt32("auto-frc", &autoFrc)) { 1881 bool enabled = autoFrc; 1882 OMX_CONFIG_BOOLEANTYPE config; 1883 InitOMXParams(&config); 1884 config.bEnabled = (OMX_BOOL)enabled; 1885 status_t temp = mOMX->setConfig( 1886 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1887 &config, sizeof(config)); 1888 if (temp == OK) { 1889 outputFormat->setInt32("auto-frc", enabled); 1890 } else if (enabled) { 1891 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1892 } 1893 } 1894 // END of temporary support for automatic FRC 1895 1896 int32_t tunneled; 1897 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1898 tunneled != 0) { 1899 ALOGI("Configuring TUNNELED video playback."); 1900 mTunneled = true; 1901 1902 int32_t audioHwSync = 0; 1903 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1904 ALOGW("No Audio HW Sync provided for video tunnel"); 1905 } 1906 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1907 if (err != OK) { 1908 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1909 audioHwSync, nativeWindow.get()); 1910 return err; 1911 } 1912 1913 int32_t maxWidth = 0, maxHeight = 0; 1914 if (msg->findInt32("max-width", &maxWidth) && 1915 msg->findInt32("max-height", &maxHeight)) { 1916 
1917 err = mOMX->prepareForAdaptivePlayback( 1918 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1919 if (err != OK) { 1920 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1921 mComponentName.c_str(), err); 1922 // allow failure 1923 err = OK; 1924 } else { 1925 inputFormat->setInt32("max-width", maxWidth); 1926 inputFormat->setInt32("max-height", maxHeight); 1927 inputFormat->setInt32("adaptive-playback", true); 1928 } 1929 } 1930 } else { 1931 ALOGV("Configuring CPU controlled video playback."); 1932 mTunneled = false; 1933 1934 // Explicity reset the sideband handle of the window for 1935 // non-tunneled video in case the window was previously used 1936 // for a tunneled video playback. 1937 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1938 if (err != OK) { 1939 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1940 return err; 1941 } 1942 1943 // Always try to enable dynamic output buffers on native surface 1944 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1945 err = mOMX->storeMetaDataInBuffers( 1946 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1947 if (err != OK) { 1948 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1949 mComponentName.c_str(), err); 1950 1951 // if adaptive playback has been requested, try JB fallback 1952 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1953 // LARGE MEMORY REQUIREMENT 1954 1955 // we will not do adaptive playback on software accessed 1956 // surfaces as they never had to respond to changes in the 1957 // crop window, and we don't trust that they will be able to. 
1958 int usageBits = 0; 1959 bool canDoAdaptivePlayback; 1960 1961 if (nativeWindow->query( 1962 nativeWindow.get(), 1963 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1964 &usageBits) != OK) { 1965 canDoAdaptivePlayback = false; 1966 } else { 1967 canDoAdaptivePlayback = 1968 (usageBits & 1969 (GRALLOC_USAGE_SW_READ_MASK | 1970 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1971 } 1972 1973 int32_t maxWidth = 0, maxHeight = 0; 1974 if (canDoAdaptivePlayback && 1975 msg->findInt32("max-width", &maxWidth) && 1976 msg->findInt32("max-height", &maxHeight)) { 1977 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1978 mComponentName.c_str(), maxWidth, maxHeight); 1979 1980 err = mOMX->prepareForAdaptivePlayback( 1981 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1982 maxHeight); 1983 ALOGW_IF(err != OK, 1984 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1985 mComponentName.c_str(), err); 1986 1987 if (err == OK) { 1988 inputFormat->setInt32("max-width", maxWidth); 1989 inputFormat->setInt32("max-height", maxHeight); 1990 inputFormat->setInt32("adaptive-playback", true); 1991 } 1992 } 1993 // allow failure 1994 err = OK; 1995 } else { 1996 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1997 mComponentName.c_str()); 1998 CHECK(storingMetadataInDecodedBuffers()); 1999 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 2000 "legacy-adaptive", !msg->contains("no-experiments")); 2001 2002 inputFormat->setInt32("adaptive-playback", true); 2003 } 2004 2005 int32_t push; 2006 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 2007 && push != 0) { 2008 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 2009 } 2010 } 2011 2012 int32_t rotationDegrees; 2013 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 2014 mRotationDegrees = rotationDegrees; 2015 } else { 2016 mRotationDegrees = 0; 2017 } 2018 } 2019 2020 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 2021 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 2022 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 2023 2024 if (video) { 2025 // determine need for software renderer 2026 bool usingSwRenderer = false; 2027 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 2028 usingSwRenderer = true; 2029 haveNativeWindow = false; 2030 } 2031 2032 if (encoder) { 2033 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2034 } else { 2035 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2036 } 2037 2038 if (err != OK) { 2039 return err; 2040 } 2041 2042 if (haveNativeWindow) { 2043 mNativeWindow = static_cast<Surface *>(obj.get()); 2044 } 2045 2046 // initialize native window now to get actual output format 2047 // TODO: this is needed for some encoders even though they don't use native window 2048 err = initNativeWindow(); 2049 if (err != OK) { 2050 return err; 2051 } 2052 2053 // fallback for devices that do not handle flex-YUV for native buffers 2054 if (haveNativeWindow) { 2055 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2056 if (msg->findInt32("color-format", &requestedColorFormat) && 2057 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2058 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2059 if (err != OK) { 2060 return err; 2061 } 2062 int32_t colorFormat = OMX_COLOR_FormatUnused; 2063 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2064 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2065 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2066 return BAD_VALUE; 2067 } 2068 ALOGD("[%s] Requested output format %#x and got %#x.", 2069 mComponentName.c_str(), requestedColorFormat, colorFormat); 2070 if (!isFlexibleColorFormat( 2071 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2072 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2073 // device did not handle flex-YUV request for native window, fall back 2074 // to SW renderer 2075 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2076 mNativeWindow.clear(); 2077 mNativeWindowUsageBits = 0; 2078 haveNativeWindow = false; 2079 usingSwRenderer = true; 2080 if (storingMetadataInDecodedBuffers()) { 2081 err = mOMX->storeMetaDataInBuffers( 2082 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2083 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2084 // TODO: implement adaptive-playback support for bytebuffer mode. 2085 // This is done by SW codecs, but most HW codecs don't support it. 2086 inputFormat->setInt32("adaptive-playback", false); 2087 } 2088 if (err == OK) { 2089 err = mOMX->enableNativeBuffers( 2090 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2091 } 2092 if (mFlags & kFlagIsGrallocUsageProtected) { 2093 // fallback is not supported for protected playback 2094 err = PERMISSION_DENIED; 2095 } else if (err == OK) { 2096 err = setupVideoDecoder( 2097 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2098 } 2099 } 2100 } 2101 } 2102 2103 if (usingSwRenderer) { 2104 outputFormat->setInt32("using-sw-renderer", 1); 2105 } 2106 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2107 int32_t numChannels, sampleRate; 2108 if (!msg->findInt32("channel-count", &numChannels) 2109 || !msg->findInt32("sample-rate", &sampleRate)) { 2110 // Since we did not always check for these, leave them optional 2111 // and have the decoder figure it all out. 2112 err = OK; 2113 } else { 2114 err = setupRawAudioFormat( 2115 encoder ? 
kPortIndexInput : kPortIndexOutput, 2116 sampleRate, 2117 numChannels); 2118 } 2119 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2120 int32_t numChannels, sampleRate; 2121 if (!msg->findInt32("channel-count", &numChannels) 2122 || !msg->findInt32("sample-rate", &sampleRate)) { 2123 err = INVALID_OPERATION; 2124 } else { 2125 int32_t isADTS, aacProfile; 2126 int32_t sbrMode; 2127 int32_t maxOutputChannelCount; 2128 int32_t pcmLimiterEnable; 2129 drcParams_t drc; 2130 if (!msg->findInt32("is-adts", &isADTS)) { 2131 isADTS = 0; 2132 } 2133 if (!msg->findInt32("aac-profile", &aacProfile)) { 2134 aacProfile = OMX_AUDIO_AACObjectNull; 2135 } 2136 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2137 sbrMode = -1; 2138 } 2139 2140 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2141 maxOutputChannelCount = -1; 2142 } 2143 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2144 // value is unknown 2145 pcmLimiterEnable = -1; 2146 } 2147 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2148 // value is unknown 2149 drc.encodedTargetLevel = -1; 2150 } 2151 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2152 // value is unknown 2153 drc.drcCut = -1; 2154 } 2155 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2156 // value is unknown 2157 drc.drcBoost = -1; 2158 } 2159 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2160 // value is unknown 2161 drc.heavyCompression = -1; 2162 } 2163 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2164 // value is unknown 2165 drc.targetRefLevel = -1; 2166 } 2167 2168 err = setupAACCodec( 2169 encoder, numChannels, sampleRate, bitRate, aacProfile, 2170 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2171 pcmLimiterEnable); 2172 } 2173 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2174 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2175 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2176 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2177 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2178 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2179 // These are PCM-like formats with a fixed sample rate but 2180 // a variable number of channels. 2181 2182 int32_t numChannels; 2183 if (!msg->findInt32("channel-count", &numChannels)) { 2184 err = INVALID_OPERATION; 2185 } else { 2186 int32_t sampleRate; 2187 if (!msg->findInt32("sample-rate", &sampleRate)) { 2188 sampleRate = 8000; 2189 } 2190 err = setupG711Codec(encoder, sampleRate, numChannels); 2191 } 2192 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2193 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2194 if (encoder && 2195 (!msg->findInt32("channel-count", &numChannels) 2196 || !msg->findInt32("sample-rate", &sampleRate))) { 2197 ALOGE("missing channel count or sample rate for FLAC encoder"); 2198 err = INVALID_OPERATION; 2199 } else { 2200 if (encoder) { 2201 if (!msg->findInt32( 2202 "complexity", &compressionLevel) && 2203 !msg->findInt32( 2204 "flac-compression-level", &compressionLevel)) { 2205 compressionLevel = 5; // default FLAC compression level 2206 } else if (compressionLevel < 0) { 2207 ALOGW("compression level %d outside [0..8] range, " 2208 "using 0", 2209 compressionLevel); 2210 compressionLevel = 0; 2211 } else if (compressionLevel > 8) { 2212 ALOGW("compression level %d outside [0..8] range, " 2213 "using 8", 2214 compressionLevel); 2215 compressionLevel = 8; 2216 } 2217 } 2218 err = setupFlacCodec( 2219 encoder, numChannels, sampleRate, compressionLevel); 2220 } 2221 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2222 int32_t numChannels, sampleRate; 2223 if (encoder 2224 || !msg->findInt32("channel-count", &numChannels) 2225 || !msg->findInt32("sample-rate", &sampleRate)) { 2226 err = INVALID_OPERATION; 2227 } else { 2228 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2229 } 2230 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2231 int32_t numChannels; 2232 int32_t sampleRate; 2233 if (!msg->findInt32("channel-count", &numChannels) 2234 || !msg->findInt32("sample-rate", &sampleRate)) { 2235 err = INVALID_OPERATION; 2236 } else { 2237 err = setupAC3Codec(encoder, numChannels, sampleRate); 2238 } 2239 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2240 int32_t numChannels; 2241 int32_t sampleRate; 2242 if (!msg->findInt32("channel-count", &numChannels) 2243 || !msg->findInt32("sample-rate", &sampleRate)) { 2244 err = INVALID_OPERATION; 2245 } else { 2246 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2247 } 2248 } 2249 2250 if (err != OK) { 2251 return err; 2252 } 2253 2254 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2255 mEncoderDelay = 0; 2256 } 2257 2258 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2259 mEncoderPadding = 0; 2260 } 2261 2262 if (msg->findInt32("channel-mask", &mChannelMask)) { 2263 mChannelMaskPresent = true; 2264 } else { 2265 mChannelMaskPresent = false; 2266 } 2267 2268 int32_t maxInputSize; 2269 if (msg->findInt32("max-input-size", &maxInputSize)) { 2270 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2271 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2272 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2273 } 2274 2275 int32_t priority; 2276 if (msg->findInt32("priority", &priority)) { 2277 err = setPriority(priority); 2278 } 2279 2280 int32_t rateInt = -1; 2281 float rateFloat = -1; 2282 if (!msg->findFloat("operating-rate", &rateFloat)) { 2283 msg->findInt32("operating-rate", &rateInt); 2284 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
2285 } 2286 if (rateFloat > 0) { 2287 err = setOperatingRate(rateFloat, video); 2288 } 2289 2290 // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame. 2291 mBaseOutputFormat = outputFormat; 2292 // trigger a kWhatOutputFormatChanged msg on first buffer 2293 mLastOutputFormat.clear(); 2294 2295 err = getPortFormat(kPortIndexInput, inputFormat); 2296 if (err == OK) { 2297 err = getPortFormat(kPortIndexOutput, outputFormat); 2298 if (err == OK) { 2299 mInputFormat = inputFormat; 2300 mOutputFormat = outputFormat; 2301 } 2302 } 2303 2304 // create data converters if needed 2305 if (!video && err == OK) { 2306 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 2307 if (encoder) { 2308 (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2309 mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding); 2310 if (mConverter[kPortIndexInput] != NULL) { 2311 mInputFormat->setInt32("pcm-encoding", pcmEncoding); 2312 } 2313 } else { 2314 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2315 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 2316 if (mConverter[kPortIndexOutput] != NULL) { 2317 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 2318 } 2319 } 2320 } 2321 2322 return err; 2323} 2324 2325status_t ACodec::setPriority(int32_t priority) { 2326 if (priority < 0) { 2327 return BAD_VALUE; 2328 } 2329 OMX_PARAM_U32TYPE config; 2330 InitOMXParams(&config); 2331 config.nU32 = (OMX_U32)priority; 2332 status_t temp = mOMX->setConfig( 2333 mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2334 &config, sizeof(config)); 2335 if (temp != OK) { 2336 ALOGI("codec does not support config priority (err %d)", temp); 2337 } 2338 return OK; 2339} 2340 2341status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2342 if (rateFloat < 0) { 2343 return BAD_VALUE; 2344 } 2345 OMX_U32 rate; 2346 if (isVideo) { 2347 if (rateFloat > 
65535) { 2348 return BAD_VALUE; 2349 } 2350 rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f); 2351 } else { 2352 if (rateFloat > UINT_MAX) { 2353 return BAD_VALUE; 2354 } 2355 rate = (OMX_U32)(rateFloat); 2356 } 2357 OMX_PARAM_U32TYPE config; 2358 InitOMXParams(&config); 2359 config.nU32 = rate; 2360 status_t err = mOMX->setConfig( 2361 mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, 2362 &config, sizeof(config)); 2363 if (err != OK) { 2364 ALOGI("codec does not support config operating rate (err %d)", err); 2365 } 2366 return OK; 2367} 2368 2369status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) { 2370 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2371 InitOMXParams(¶ms); 2372 params.nPortIndex = kPortIndexOutput; 2373 status_t err = mOMX->getConfig( 2374 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2375 if (err == OK) { 2376 *intraRefreshPeriod = params.nRefreshPeriod; 2377 return OK; 2378 } 2379 2380 // Fallback to query through standard OMX index. 
2381 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2382 InitOMXParams(&refreshParams); 2383 refreshParams.nPortIndex = kPortIndexOutput; 2384 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2385 err = mOMX->getParameter( 2386 mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams)); 2387 if (err != OK || refreshParams.nCirMBs == 0) { 2388 *intraRefreshPeriod = 0; 2389 return OK; 2390 } 2391 2392 // Calculate period based on width and height 2393 uint32_t width, height; 2394 OMX_PARAM_PORTDEFINITIONTYPE def; 2395 InitOMXParams(&def); 2396 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2397 def.nPortIndex = kPortIndexOutput; 2398 err = mOMX->getParameter( 2399 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2400 if (err != OK) { 2401 *intraRefreshPeriod = 0; 2402 return err; 2403 } 2404 width = video_def->nFrameWidth; 2405 height = video_def->nFrameHeight; 2406 // Use H.264/AVC MacroBlock size 16x16 2407 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs); 2408 2409 return OK; 2410} 2411 2412status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) { 2413 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2414 InitOMXParams(¶ms); 2415 params.nPortIndex = kPortIndexOutput; 2416 params.nRefreshPeriod = intraRefreshPeriod; 2417 status_t err = mOMX->setConfig( 2418 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2419 if (err == OK) { 2420 return OK; 2421 } 2422 2423 // Only in configure state, a component could invoke setParameter. 
2424 if (!inConfigure) { 2425 return INVALID_OPERATION; 2426 } else { 2427 ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str()); 2428 } 2429 2430 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2431 InitOMXParams(&refreshParams); 2432 refreshParams.nPortIndex = kPortIndexOutput; 2433 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2434 2435 if (intraRefreshPeriod == 0) { 2436 // 0 means disable intra refresh. 2437 refreshParams.nCirMBs = 0; 2438 } else { 2439 // Calculate macroblocks that need to be intra coded base on width and height 2440 uint32_t width, height; 2441 OMX_PARAM_PORTDEFINITIONTYPE def; 2442 InitOMXParams(&def); 2443 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2444 def.nPortIndex = kPortIndexOutput; 2445 err = mOMX->getParameter( 2446 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2447 if (err != OK) { 2448 return err; 2449 } 2450 width = video_def->nFrameWidth; 2451 height = video_def->nFrameHeight; 2452 // Use H.264/AVC MacroBlock size 16x16 2453 refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod); 2454 } 2455 2456 err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh, 2457 &refreshParams, sizeof(refreshParams)); 2458 if (err != OK) { 2459 return err; 2460 } 2461 2462 return OK; 2463} 2464 2465status_t ACodec::configureTemporalLayers( 2466 const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) { 2467 if (!mIsVideo || !mIsEncoder) { 2468 return INVALID_OPERATION; 2469 } 2470 2471 AString tsSchema; 2472 if (!msg->findString("ts-schema", &tsSchema)) { 2473 return OK; 2474 } 2475 2476 unsigned int numLayers = 0; 2477 unsigned int numBLayers = 0; 2478 int tags; 2479 char dummy; 2480 OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern = 2481 OMX_VIDEO_AndroidTemporalLayeringPatternNone; 2482 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1 2483 && numLayers > 0) { 2484 pattern = 
OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC; 2485 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c", 2486 &numLayers, &dummy, &numBLayers, &dummy)) 2487 && (tags == 1 || (tags == 3 && dummy == '+')) 2488 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 2489 numLayers += numBLayers; 2490 pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid; 2491 } else { 2492 ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 2493 return BAD_VALUE; 2494 } 2495 2496 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams; 2497 InitOMXParams(&layerParams); 2498 layerParams.nPortIndex = kPortIndexOutput; 2499 2500 status_t err = mOMX->getParameter( 2501 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2502 &layerParams, sizeof(layerParams)); 2503 2504 if (err != OK) { 2505 return err; 2506 } else if (!(layerParams.eSupportedPatterns & pattern)) { 2507 return BAD_VALUE; 2508 } 2509 2510 numLayers = min(numLayers, layerParams.nLayerCountMax); 2511 numBLayers = min(numBLayers, layerParams.nBLayerCountMax); 2512 2513 if (!inConfigure) { 2514 OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig; 2515 InitOMXParams(&layerConfig); 2516 layerConfig.nPortIndex = kPortIndexOutput; 2517 layerConfig.ePattern = pattern; 2518 layerConfig.nPLayerCountActual = numLayers - numBLayers; 2519 layerConfig.nBLayerCountActual = numBLayers; 2520 layerConfig.bBitrateRatiosSpecified = OMX_FALSE; 2521 2522 err = mOMX->setConfig( 2523 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering, 2524 &layerConfig, sizeof(layerConfig)); 2525 } else { 2526 layerParams.ePattern = pattern; 2527 layerParams.nPLayerCountActual = numLayers - numBLayers; 2528 layerParams.nBLayerCountActual = numBLayers; 2529 layerParams.bBitrateRatiosSpecified = OMX_FALSE; 2530 2531 err = mOMX->setParameter( 2532 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2533 &layerParams, sizeof(layerParams)); 2534 } 2535 2536 AString configSchema; 
2537 if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) { 2538 configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers); 2539 } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) { 2540 configSchema = AStringPrintf("webrtc.vp8.%u", numLayers); 2541 } 2542 2543 if (err != OK) { 2544 ALOGW("Failed to set temporal layers to %s (requested %s)", 2545 configSchema.c_str(), tsSchema.c_str()); 2546 return err; 2547 } 2548 2549 err = mOMX->getParameter( 2550 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2551 &layerParams, sizeof(layerParams)); 2552 2553 if (err == OK) { 2554 ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)", 2555 tsSchema.c_str(), configSchema.c_str(), 2556 asString(layerParams.ePattern), layerParams.ePattern, 2557 layerParams.nPLayerCountActual, layerParams.nBLayerCountActual); 2558 2559 if (outputFormat.get() == mOutputFormat.get()) { 2560 mOutputFormat = mOutputFormat->dup(); // trigger an output format change event 2561 } 2562 // assume we got what we configured 2563 outputFormat->setString("ts-schema", configSchema); 2564 } 2565 return err; 2566} 2567 2568status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { 2569 OMX_PARAM_PORTDEFINITIONTYPE def; 2570 InitOMXParams(&def); 2571 def.nPortIndex = portIndex; 2572 2573 status_t err = mOMX->getParameter( 2574 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2575 2576 if (err != OK) { 2577 return err; 2578 } 2579 2580 if (def.nBufferSize >= size) { 2581 return OK; 2582 } 2583 2584 def.nBufferSize = size; 2585 2586 err = mOMX->setParameter( 2587 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2588 2589 if (err != OK) { 2590 return err; 2591 } 2592 2593 err = mOMX->getParameter( 2594 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2595 2596 if (err != OK) { 2597 return err; 2598 } 2599 2600 if (def.nBufferSize < size) { 2601 ALOGE("failed to set min buffer size to 
%zu (is still %u)", size, def.nBufferSize); 2602 return FAILED_TRANSACTION; 2603 } 2604 2605 return OK; 2606} 2607 2608status_t ACodec::selectAudioPortFormat( 2609 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 2610 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 2611 InitOMXParams(&format); 2612 2613 format.nPortIndex = portIndex; 2614 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2615 format.nIndex = index; 2616 status_t err = mOMX->getParameter( 2617 mNode, OMX_IndexParamAudioPortFormat, 2618 &format, sizeof(format)); 2619 2620 if (err != OK) { 2621 return err; 2622 } 2623 2624 if (format.eEncoding == desiredFormat) { 2625 break; 2626 } 2627 2628 if (index == kMaxIndicesToCheck) { 2629 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 2630 mComponentName.c_str(), index, 2631 asString(format.eEncoding), format.eEncoding); 2632 return ERROR_UNSUPPORTED; 2633 } 2634 } 2635 2636 return mOMX->setParameter( 2637 mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2638} 2639 2640status_t ACodec::setupAACCodec( 2641 bool encoder, int32_t numChannels, int32_t sampleRate, 2642 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 2643 int32_t maxOutputChannelCount, const drcParams_t& drc, 2644 int32_t pcmLimiterEnable) { 2645 if (encoder && isADTS) { 2646 return -EINVAL; 2647 } 2648 2649 status_t err = setupRawAudioFormat( 2650 encoder ? 
kPortIndexInput : kPortIndexOutput, 2651 sampleRate, 2652 numChannels); 2653 2654 if (err != OK) { 2655 return err; 2656 } 2657 2658 if (encoder) { 2659 err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC); 2660 2661 if (err != OK) { 2662 return err; 2663 } 2664 2665 OMX_PARAM_PORTDEFINITIONTYPE def; 2666 InitOMXParams(&def); 2667 def.nPortIndex = kPortIndexOutput; 2668 2669 err = mOMX->getParameter( 2670 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2671 2672 if (err != OK) { 2673 return err; 2674 } 2675 2676 def.format.audio.bFlagErrorConcealment = OMX_TRUE; 2677 def.format.audio.eEncoding = OMX_AUDIO_CodingAAC; 2678 2679 err = mOMX->setParameter( 2680 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2681 2682 if (err != OK) { 2683 return err; 2684 } 2685 2686 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2687 InitOMXParams(&profile); 2688 profile.nPortIndex = kPortIndexOutput; 2689 2690 err = mOMX->getParameter( 2691 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2692 2693 if (err != OK) { 2694 return err; 2695 } 2696 2697 profile.nChannels = numChannels; 2698 2699 profile.eChannelMode = 2700 (numChannels == 1) 2701 ? 
OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo; 2702 2703 profile.nSampleRate = sampleRate; 2704 profile.nBitRate = bitRate; 2705 profile.nAudioBandWidth = 0; 2706 profile.nFrameLength = 0; 2707 profile.nAACtools = OMX_AUDIO_AACToolAll; 2708 profile.nAACERtools = OMX_AUDIO_AACERNone; 2709 profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile; 2710 profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF; 2711 switch (sbrMode) { 2712 case 0: 2713 // disable sbr 2714 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2715 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2716 break; 2717 case 1: 2718 // enable single-rate sbr 2719 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2720 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2721 break; 2722 case 2: 2723 // enable dual-rate sbr 2724 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2725 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2726 break; 2727 case -1: 2728 // enable both modes -> the codec will decide which mode should be used 2729 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2730 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2731 break; 2732 default: 2733 // unsupported sbr mode 2734 return BAD_VALUE; 2735 } 2736 2737 2738 err = mOMX->setParameter( 2739 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2740 2741 if (err != OK) { 2742 return err; 2743 } 2744 2745 return err; 2746 } 2747 2748 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2749 InitOMXParams(&profile); 2750 profile.nPortIndex = kPortIndexInput; 2751 2752 err = mOMX->getParameter( 2753 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2754 2755 if (err != OK) { 2756 return err; 2757 } 2758 2759 profile.nChannels = numChannels; 2760 profile.nSampleRate = sampleRate; 2761 2762 profile.eAACStreamFormat = 2763 isADTS 2764 ? 
OMX_AUDIO_AACStreamFormatMP4ADTS 2765 : OMX_AUDIO_AACStreamFormatMP4FF; 2766 2767 OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation; 2768 InitOMXParams(&presentation); 2769 presentation.nMaxOutputChannels = maxOutputChannelCount; 2770 presentation.nDrcCut = drc.drcCut; 2771 presentation.nDrcBoost = drc.drcBoost; 2772 presentation.nHeavyCompression = drc.heavyCompression; 2773 presentation.nTargetReferenceLevel = drc.targetRefLevel; 2774 presentation.nEncodedTargetLevel = drc.encodedTargetLevel; 2775 presentation.nPCMLimiterEnable = pcmLimiterEnable; 2776 2777 status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2778 if (res == OK) { 2779 // optional parameters, will not cause configuration failure 2780 mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation, 2781 &presentation, sizeof(presentation)); 2782 } else { 2783 ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res); 2784 } 2785 return res; 2786} 2787 2788status_t ACodec::setupAC3Codec( 2789 bool encoder, int32_t numChannels, int32_t sampleRate) { 2790 status_t err = setupRawAudioFormat( 2791 encoder ? 
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2792 2793 if (err != OK) { 2794 return err; 2795 } 2796 2797 if (encoder) { 2798 ALOGW("AC3 encoding is not supported."); 2799 return INVALID_OPERATION; 2800 } 2801 2802 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2803 InitOMXParams(&def); 2804 def.nPortIndex = kPortIndexInput; 2805 2806 err = mOMX->getParameter( 2807 mNode, 2808 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2809 &def, 2810 sizeof(def)); 2811 2812 if (err != OK) { 2813 return err; 2814 } 2815 2816 def.nChannels = numChannels; 2817 def.nSampleRate = sampleRate; 2818 2819 return mOMX->setParameter( 2820 mNode, 2821 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2822 &def, 2823 sizeof(def)); 2824} 2825 2826status_t ACodec::setupEAC3Codec( 2827 bool encoder, int32_t numChannels, int32_t sampleRate) { 2828 status_t err = setupRawAudioFormat( 2829 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2830 2831 if (err != OK) { 2832 return err; 2833 } 2834 2835 if (encoder) { 2836 ALOGW("EAC3 encoding is not supported."); 2837 return INVALID_OPERATION; 2838 } 2839 2840 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2841 InitOMXParams(&def); 2842 def.nPortIndex = kPortIndexInput; 2843 2844 err = mOMX->getParameter( 2845 mNode, 2846 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2847 &def, 2848 sizeof(def)); 2849 2850 if (err != OK) { 2851 return err; 2852 } 2853 2854 def.nChannels = numChannels; 2855 def.nSampleRate = sampleRate; 2856 2857 return mOMX->setParameter( 2858 mNode, 2859 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2860 &def, 2861 sizeof(def)); 2862} 2863 2864static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2865 bool isAMRWB, int32_t bps) { 2866 if (isAMRWB) { 2867 if (bps <= 6600) { 2868 return OMX_AUDIO_AMRBandModeWB0; 2869 } else if (bps <= 8850) { 2870 return OMX_AUDIO_AMRBandModeWB1; 2871 } else if (bps <= 12650) { 2872 return OMX_AUDIO_AMRBandModeWB2; 2873 } else if (bps <= 14250) { 2874 return OMX_AUDIO_AMRBandModeWB3; 
2875 } else if (bps <= 15850) { 2876 return OMX_AUDIO_AMRBandModeWB4; 2877 } else if (bps <= 18250) { 2878 return OMX_AUDIO_AMRBandModeWB5; 2879 } else if (bps <= 19850) { 2880 return OMX_AUDIO_AMRBandModeWB6; 2881 } else if (bps <= 23050) { 2882 return OMX_AUDIO_AMRBandModeWB7; 2883 } 2884 2885 // 23850 bps 2886 return OMX_AUDIO_AMRBandModeWB8; 2887 } else { // AMRNB 2888 if (bps <= 4750) { 2889 return OMX_AUDIO_AMRBandModeNB0; 2890 } else if (bps <= 5150) { 2891 return OMX_AUDIO_AMRBandModeNB1; 2892 } else if (bps <= 5900) { 2893 return OMX_AUDIO_AMRBandModeNB2; 2894 } else if (bps <= 6700) { 2895 return OMX_AUDIO_AMRBandModeNB3; 2896 } else if (bps <= 7400) { 2897 return OMX_AUDIO_AMRBandModeNB4; 2898 } else if (bps <= 7950) { 2899 return OMX_AUDIO_AMRBandModeNB5; 2900 } else if (bps <= 10200) { 2901 return OMX_AUDIO_AMRBandModeNB6; 2902 } 2903 2904 // 12200 bps 2905 return OMX_AUDIO_AMRBandModeNB7; 2906 } 2907} 2908 2909status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2910 OMX_AUDIO_PARAM_AMRTYPE def; 2911 InitOMXParams(&def); 2912 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2913 2914 status_t err = 2915 mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2916 2917 if (err != OK) { 2918 return err; 2919 } 2920 2921 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2922 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2923 2924 err = mOMX->setParameter( 2925 mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2926 2927 if (err != OK) { 2928 return err; 2929 } 2930 2931 return setupRawAudioFormat( 2932 encoder ? kPortIndexInput : kPortIndexOutput, 2933 isWAMR ? 
16000 : 8000 /* sampleRate */, 2934 1 /* numChannels */); 2935} 2936 2937status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2938 if (encoder) { 2939 return INVALID_OPERATION; 2940 } 2941 2942 return setupRawAudioFormat( 2943 kPortIndexInput, sampleRate, numChannels); 2944} 2945 2946status_t ACodec::setupFlacCodec( 2947 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2948 2949 if (encoder) { 2950 OMX_AUDIO_PARAM_FLACTYPE def; 2951 InitOMXParams(&def); 2952 def.nPortIndex = kPortIndexOutput; 2953 2954 // configure compression level 2955 status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2956 if (err != OK) { 2957 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2958 return err; 2959 } 2960 def.nCompressionLevel = compressionLevel; 2961 err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2962 if (err != OK) { 2963 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2964 return err; 2965 } 2966 } 2967 2968 return setupRawAudioFormat( 2969 encoder ? 
            kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures |portIndex| for raw PCM: forces the port into PCM coding, then
// fills in channel count, numerical format / bit depth per |encoding|,
// interleaving, sample rate and channel mapping. If the requested non-16-bit
// encoding is rejected, retries with signed 16-bit.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    // Map the requested encoding onto OMX numerical-data type + bit depth.
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    // NOTE(review): OMX_ErrorNone is 0 (== OK), so an unmappable channel
    // count returns "success" without ever setting the PCM parameters —
    // confirm this best-effort behavior is intended.
    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err != OK && encoding != kAudioEncodingPcm16bit) {
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Enables OMX tunneled (sideband) video: asks the component for a sideband
// handle synchronized to |audioHwSync| and attaches it to the native window.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Walks the component's advertised video port formats looking for one that
// matches |compressionFormat|/|colorFormat| (allowing a flexible-YUV
// substitution) and selects it on |portIndex|.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    // Enumerate supported formats by index, capped at kMaxIndicesToCheck.
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat =
                    format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        // Workaround for the TI encoder: only one of the two fields it
        // reports is trustworthy, depending on the port.
        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
            && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    // Select the matching entry (|format| still holds the matched values).
    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
//
// Suggested preference order:
// - optimal format for texture rendering (mediaplayer behavior)
// - optimal SW readable & texture renderable format (flex-YUV support)
// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
// - legacy "usable" standard formats
//
// For legacy support, we prefer a standard format, but will settle for a SW readable
// flex-YUV format.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            return OMX_ErrorBadParameter;
        }
        // Without legacy handling, simply take the first (most preferred)
        // format the component lists.
        if (!getLegacyFlexibleFormat) {
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}

// Mime type <-> OMX video coding type lookup table.
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
    { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision },
};

// Maps |mime| to its OMX coding type; on unknown mime types sets
// *codingType = OMX_VIDEO_CodingUnused and returns ERROR_UNSUPPORTED.
static status_t GetVideoCodingTypeFromMime(
        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
            return OK;
        }
    }

    *codingType = OMX_VIDEO_CodingUnused;

    return ERROR_UNSUPPORTED;
}

// Inverse of the above: maps an OMX coding type back to its mime string;
// clears |mime| and returns ERROR_UNSUPPORTED if the type is unknown.
static status_t GetMimeTypeForVideoCoding(
        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
            *mime = kVideoCodingMapEntry[i].mMime;
            return OK;
        }
    }

    mime->clear();

    return ERROR_UNSUPPORTED;
}

// Requests |bufferNum| actual buffers on |portIndex|. A component may reject
// the new count; that is logged but deliberately not treated as an error.
status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;
    status_t err;
    ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }
    def.nBufferCountActual = bufferNum;
    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        // Component could reject this request.
        ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(),
                portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    }
    // Always report success — a rejected buffer count is non-fatal.
    return OK;
}

// Full decoder configuration: picks the input compression format from the
// mime type, selects an output color format (requested or best supported),
// applies optional buffer counts and per-port video formats, then negotiates
// color aspects and HDR static info (both optional).
status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
        bool usingSwRenderer, sp<AMessage> &outputFormat) {
    int32_t width, height;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CODINGTYPE compressionFormat;
    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    if (compressionFormat == OMX_VIDEO_CodingVP9) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexInput;
        // Check if VP9 decoder advertises supported profiles.
        params.nProfileIndex = 0;
        // The inner |err| intentionally shadows the outer one; a decoder
        // that cannot enumerate profiles is treated as a legacy VP9 decoder.
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
                sizeof(params));
        mIsLegacyVP9Decoder = err != OK;
    }

    err = setVideoPortFormatType(
            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        return err;
    }

    int32_t tmp;
    if (msg->findInt32("color-format", &tmp)) {
        OMX_COLOR_FORMATTYPE colorFormat =
            static_cast<OMX_COLOR_FORMATTYPE>(tmp);
        err = setVideoPortFormatType(
                kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
        if (err != OK) {
            // Fall back to the component's own preferred format.
            ALOGW("[%s] does not support color format %d",
                    mComponentName.c_str(), colorFormat);
            err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
        }
    } else {
        err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
    }

    if (err != OK) {
        return err;
    }

    // Set the component input buffer number to be |tmp|. If succeed,
    // component will set input port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-input-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexInput, tmp);
        if (err != OK)
            return err;
    }

    // Set the component output buffer number to be |tmp|. If succeed,
    // component will set output port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-output-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexOutput, tmp);
        if (err != OK)
            return err;
    }

    // Frame rate may arrive as float or int32; -1 means "unknown".
    int32_t frameRateInt;
    float frameRateFloat;
    if (!msg->findFloat("frame-rate", &frameRateFloat)) {
        if (!msg->findInt32("frame-rate", &frameRateInt)) {
            frameRateInt = -1;
        }
        frameRateFloat = (float)frameRateInt;
    }

    err = setVideoFormatOnPort(
            kPortIndexInput, width, height, compressionFormat, frameRateFloat);

    if (err != OK) {
        return err;
    }

    err = setVideoFormatOnPort(
            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);

    if (err != OK) {
        return err;
    }

    err = setColorAspectsForVideoDecoder(
            width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }
    return err;
}

// Looks up the vendor extension used to exchange color aspects with the
// component; on failure the index is cleared so later calls become no-ops.
status_t ACodec::initDescribeColorAspectsIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
    if (err != OK) {
        mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes |params| color aspects to the codec (when the extension is
// available) and, if |verify|, reads back what the codec actually accepted.
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));

    if (verify && err == OK) {
        err = getCodecColorAspects(params);
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
            "[%s] setting color aspects failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Decoder path: derives color aspects from the configure() format
// (defaulting by video size when a native window is used), mirrors them into
// |outputFormat|, and communicates them to the codec.
status_t ACodec::setColorAspectsForVideoDecoder(
        int32_t width, int32_t height, bool usingNativeWindow,
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (usingNativeWindow) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
        // The default aspects will be set back to the output format during the
        // getFormat phase of configure(). Set non-Unspecified values back into the
        // format, in case component does not support this enumeration.
        setColorAspectsIntoFormat(params.sAspects, outputFormat);
    }

    (void)initDescribeColorAspectsIndex();

    // communicate color aspects to codec
    return setCodecColorAspects(params);
}

// Reads the codec's current color aspects (and dataspace, when one was
// requested via bRequestingDataSpace) through the describeColorAspects
// extension.
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    // Only warn on plain aspect queries — dataspace requests are allowed to
    // be unimplemented.
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}

// Copies the encoder's input-port color aspects into |format| — only when
// the codec actually supports the extension.
status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    status_t err = getCodecColorAspects(params);
    if (err == OK) {
        // we only set encoder input aspects if codec supports them
        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
    }
    return err;
}

// Resolves the dataspace for |params.sAspects|: first asks the codec for
// guidance when |tryCodec|, then falls back to the legacy aspect->dataspace
// mapping.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
            "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}


// Decoder helper: negotiates color aspects with the codec (with readback),
// records the result in |outputFormat|, and optionally resolves a dataspace.
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}

// initial video encoder setup for bytebuffer mode
// Encoder (bytebuffer) path: copies the configured color aspects into the
// output format, optionally resolves a dataspace for the recorder, and
// pushes the aspects to the codec, refusing codec changes to the platform
// aspects (retried once).
status_t ACodec::setColorAspectsForVideoEncoder(
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    // copy config to output format as this is not exposed via getFormat
    copyColorConfig(configFormat, outputFormat);

    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    getColorAspectsFromFormat(configFormat, params.sAspects);

    (void)initDescribeColorAspectsIndex();

    int32_t usingRecorder;
    if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
        android_dataspace dataSpace = HAL_DATASPACE_BT709;
        int32_t width, height;
        if (configFormat->findInt32("width", &width)
                && configFormat->findInt32("height", &height)) {
            setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
            status_t err = getDataSpace(
                    params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
            if (err != OK) {
                return err;
            }
            setColorAspectsIntoFormat(params.sAspects, outputFormat);
        }
        inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
    }

    // communicate color aspects to codec, but do not allow change of the platform aspects
    ColorAspects origAspects = params.sAspects;
    for (int triesLeft = 2; --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK
                || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
                        params.sAspects, origAspects, true /* usePlatformAspects */)) {
            return err;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }
    return OK;
}

// Pushes HDR static info from |configFormat| to the codec on |portIndex|,
// mirroring it into |outputFormat| when present in the config.
status_t ACodec::setHDRStaticInfoForVideoCodec(
        OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    HDRStaticInfo *info = &params.sInfo;
    if (getHDRStaticInfoFromFormat(configFormat, info)) {
        setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
    }

    (void)initDescribeHDRStaticInfoIndex();

    // communicate HDR static Info to codec
    return setHDRStaticInfo(params);
}

// subsequent initial video encoder setup for surface mode
status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
        android_dataspace *dataSpace /* nonnull */) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    ColorAspects &aspects = params.sAspects;

    // reset default format and store resulting format into both input and output formats
    getColorAspectsFromFormat(mConfigFormat, aspects);
    int32_t width, height;
    if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) {
        setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
    }
    setColorAspectsIntoFormat(aspects, mInputFormat);
    setColorAspectsIntoFormat(aspects, mOutputFormat);

    // communicate color aspects to codec, but do not allow any change
    ColorAspects origAspects = aspects;
    status_t err = OK;
    // NOTE(review): the |err| declared inside the loop shadows the outer
    // |err| above, so a failure from setCodecColorAspects() does not
    // propagate past the loop — confirm this is intended.
    for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) {
            break;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }

    *dataSpace = HAL_DATASPACE_BT709;
    aspects = origAspects; // restore desired color aspects
    status_t res = getDataSpace(
            params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */);
    if (err == OK) {
        err = res;
    }
    mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
    mInputFormat->setBuffer(
            "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));

    // update input format with codec supported color aspects (basically set unsupported
    // aspects to Unspecified)
    if (err == OK) {
        (void)getInputColorAspectsForVideoEncoder(mInputFormat);
    }

    ALOGV("set default color aspects, updated input format to %s, output format to %s",
            mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());

    return err;
}

// Reads HDR static info from the codec and stores it into |format| on success.
status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    status_t err = getHDRStaticInfo(params);
    if (err == OK) {
        // we only set decoder output HDRStaticInfo if codec supports them
        setHDRStaticInfoIntoFormat(params.sInfo, format);
    }
    return err;
}

// Looks up the vendor extension for HDR static info; cleared on failure so
// setHDRStaticInfo()/getHDRStaticInfo() become no-ops.
status_t ACodec::initDescribeHDRStaticInfoIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
    if (err != OK) {
        mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Sends HDR static info to the codec via the extension (if available) and
// logs the primaries / luminance / light-level values for debugging.
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    const HDRStaticInfo *info = &params.sInfo;
    ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, "
            "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
            mComponentName.c_str(),
            info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y,
            info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
            info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
            info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] setting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Reads HDR static info from the codec via the extension (if available).
status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] getting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Full encoder configuration: raw input port (size/stride/frame rate/color
// format), compressed output port (codec/bitrate), codec-specific encoder
// parameters, then color aspects and HDR static info (both optional).
status_t ACodec::setupVideoEncoder(
        const char *mime, const sp<AMessage> &msg,
        sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    int32_t tmp;
    if (!msg->findInt32("color-format", &tmp)) {
        return INVALID_OPERATION;
    }

    OMX_COLOR_FORMATTYPE colorFormat =
        static_cast<OMX_COLOR_FORMATTYPE>(tmp);

    status_t err = setVideoPortFormatType(
            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);

    if (err != OK) {
        ALOGE("[%s] does not support color format %d",
                mComponentName.c_str(), colorFormat);

        return err;
    }

    /* Input port configuration */

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    int32_t width, height, bitrate;
    if (!msg->findInt32("width", &width)
|| !msg->findInt32("height", &height) 3749 || !msg->findInt32("bitrate", &bitrate)) { 3750 return INVALID_OPERATION; 3751 } 3752 3753 video_def->nFrameWidth = width; 3754 video_def->nFrameHeight = height; 3755 3756 int32_t stride; 3757 if (!msg->findInt32("stride", &stride)) { 3758 stride = width; 3759 } 3760 3761 video_def->nStride = stride; 3762 3763 int32_t sliceHeight; 3764 if (!msg->findInt32("slice-height", &sliceHeight)) { 3765 sliceHeight = height; 3766 } 3767 3768 video_def->nSliceHeight = sliceHeight; 3769 3770 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3771 3772 float frameRate; 3773 if (!msg->findFloat("frame-rate", &frameRate)) { 3774 int32_t tmp; 3775 if (!msg->findInt32("frame-rate", &tmp)) { 3776 return INVALID_OPERATION; 3777 } 3778 frameRate = (float)tmp; 3779 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3780 } 3781 3782 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3783 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3784 // this is redundant as it was already set up in setVideoPortFormatType 3785 // FIXME for now skip this only for flexible YUV formats 3786 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3787 video_def->eColorFormat = colorFormat; 3788 } 3789 3790 err = mOMX->setParameter( 3791 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3792 3793 if (err != OK) { 3794 ALOGE("[%s] failed to set input port definition parameters.", 3795 mComponentName.c_str()); 3796 3797 return err; 3798 } 3799 3800 /* Output port configuration */ 3801 3802 OMX_VIDEO_CODINGTYPE compressionFormat; 3803 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3804 3805 if (err != OK) { 3806 return err; 3807 } 3808 3809 err = setVideoPortFormatType( 3810 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3811 3812 if (err != OK) { 3813 ALOGE("[%s] does not support compression format %d", 3814 mComponentName.c_str(), compressionFormat); 3815 3816 return err; 3817 } 3818 3819 
    // --- tail of ACodec::setupVideoEncoder(); the function opens before this excerpt ---
    def.nPortIndex = kPortIndexOutput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // Output (compressed) port: geometry, bitrate and coding type; the color
    // format is unused on the compressed side.
    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;
    video_def->xFramerate = 0;
    video_def->nBitrate = bitrate;
    video_def->eCompressionFormat = compressionFormat;
    video_def->eColorFormat = OMX_COLOR_FormatUnused;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set output port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    // "intra-refresh-period" is an optional key; a component rejecting it is
    // not a configure failure.
    int32_t intraRefreshPeriod = 0;
    if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod >= 0) {
        err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    // Dispatch to the codec-specific encoder setup.
    switch (compressionFormat) {
        case OMX_VIDEO_CodingMPEG4:
            err = setupMPEG4EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingH263:
            err = setupH263EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingAVC:
            err = setupAVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingHEVC:
            err = setupHEVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            err = setupVPXEncoderParameters(msg);
            break;

        default:
            break;
    }

    if (err != OK) {
        return err;
    }

    // Set up color aspects on input, but propagate them to the output format, as they will
    // not be read back from encoder.
    err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
    if (err == ERROR_UNSUPPORTED) {
        ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    switch (compressionFormat) {
        case OMX_VIDEO_CodingAVC:
        case OMX_VIDEO_CodingHEVC:
            err = configureTemporalLayers(msg, true /* inConfigure */, outputFormat);
            if (err != OK) {
                err = OK; // ignore failure
            }
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            // TODO: do we need to support android.generic layering? webrtc layering is
            // already set up in setupVPXEncoderParameters.
            break;

        default:
            break;
    }

    if (err == OK) {
        ALOGI("setupVideoEncoder succeeded");
    }

    return err;
}

// Translates the MediaCodec "intra-refresh-mode" plus the CIR/AIR macroblock
// counts from |msg| into OMX_VIDEO_PARAM_INTRAREFRESHTYPE on the output port.
// Returns INVALID_OPERATION when a count required by |mode| is missing from
// |msg|, otherwise the component's setParameter() status.
status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);

    // Cyclic (and combined) refresh needs the MB count refreshed per frame.
    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nCirMBs = mbs;
    }

    // Adaptive (and combined) refresh needs the AIR MB and reference counts.
    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nAirMBs = mbs;

        int32_t ref;
        if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
            return INVALID_OPERATION;
        }
        params.nAirRef = ref;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoIntraRefresh,
            &params, sizeof(params));
    return err;
}

// Converts an I-frame interval in seconds plus a frame rate into the OMX
// nPFrames value (number of P frames between I frames), accounting for the
// number of B frames between consecutive I/P frames.
static OMX_U32 setPFramesSpacing(
        int32_t iFramesInterval /* seconds */, int32_t frameRate, uint32_t BFramesSpacing = 0) {
    // BFramesSpacing is the number of B frames between I/P frames
    // PFramesSpacing (the value to be returned) is the number of P frames between I frames
    //
    // keyFrameInterval = ((PFramesSpacing + 1) * BFramesSpacing) + PFramesSpacing + 1
    //                                ^^^                            ^^^        ^^^
    //                          number of B frames               number of P  I frame
    //
    //                  = (PFramesSpacing + 1) * (BFramesSpacing + 1)
    //
    // E.g.
    //      I   P   I  : I-interval: 8, nPFrames 1, nBFrames 3
    //       BBB BBB

    if (iFramesInterval < 0) { // just 1 key frame
        return 0xFFFFFFFE; // don't use maxint as key-frame-interval calculation will add 1
    } else if (iFramesInterval == 0) { // just key frames
        return 0;
    }

    // round down as key-frame-interval is an upper limit
    uint32_t keyFrameInterval = uint32_t(frameRate * iFramesInterval);
    OMX_U32 ret = keyFrameInterval / (BFramesSpacing + 1);
    return ret > 0 ?
            ret - 1 : 0;
}

// Reads the requested rate-control mode from |msg|; defaults to variable
// bitrate when the "bitrate-mode" key is absent.
static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
    int32_t tmp;
    if (!msg->findInt32("bitrate-mode", &tmp)) {
        return OMX_Video_ControlRateVariable;
    }

    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
}

// Configures the component for MPEG-4 encoding: GOP structure, profile/level,
// bitrate, then the (optional) error-correction parameters.
// Requires "bitrate", "i-frame-interval" and "frame-rate" in |msg|.
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either float or int32.
    // NOTE(review): this parse sequence is duplicated in every
    // setup*EncoderParameters below — candidate for a shared helper.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nBFrames = 0;
    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, mpeg4type.nBFrames);
    if (mpeg4type.nPFrames == 0) {
        // No P frames between I frames: all-intra stream.
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    // "profile" implies "level": specifying one without the other is an error.
    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// Configures the component for H.263 encoding; mirrors the MPEG-4 path.
// Requires "bitrate", "i-frame-interval" and "frame-rate" in |msg|.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either float or int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nBFrames = 0;
    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h263type.nBFrames);
    if (h263type.nPFrames == 0) {
        // No P frames between I frames: all-intra stream.
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if
(err != OK) { 4136 return err; 4137 } 4138 4139 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 4140 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 4141 } 4142 4143 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 4144 h263type.bForceRoundingTypeToZero = OMX_FALSE; 4145 h263type.nPictureHeaderRepetition = 0; 4146 h263type.nGOBHeaderInterval = 0; 4147 4148 err = mOMX->setParameter( 4149 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 4150 4151 if (err != OK) { 4152 return err; 4153 } 4154 4155 err = configureBitrate(bitrate, bitrateMode); 4156 4157 if (err != OK) { 4158 return err; 4159 } 4160 4161 return setupErrorCorrectionParameters(); 4162} 4163 4164// static 4165int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor( 4166 int width, int height, int rate, int bitrate, 4167 OMX_VIDEO_AVCPROFILETYPE profile) { 4168 // convert bitrate to main/baseline profile kbps equivalent 4169 switch (profile) { 4170 case OMX_VIDEO_AVCProfileHigh10: 4171 bitrate = divUp(bitrate, 3000); break; 4172 case OMX_VIDEO_AVCProfileHigh: 4173 bitrate = divUp(bitrate, 1250); break; 4174 default: 4175 bitrate = divUp(bitrate, 1000); break; 4176 } 4177 4178 // convert size and rate to MBs 4179 width = divUp(width, 16); 4180 height = divUp(height, 16); 4181 int mbs = width * height; 4182 rate *= mbs; 4183 int maxDimension = max(width, height); 4184 4185 static const int limits[][5] = { 4186 /* MBps MB dim bitrate level */ 4187 { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 }, 4188 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b }, 4189 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 }, 4190 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 }, 4191 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 }, 4192 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 }, 4193 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 }, 4194 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 }, 4195 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 }, 4196 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 4197 { 216000, 
5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 4198 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 4199 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 4200 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 4201 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 4202 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 4203 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 4204 }; 4205 4206 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 4207 const int (&limit)[5] = limits[i]; 4208 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 4209 && bitrate <= limit[3]) { 4210 return limit[4]; 4211 } 4212 } 4213 return 0; 4214} 4215 4216status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 4217 int32_t bitrate, iFrameInterval; 4218 if (!msg->findInt32("bitrate", &bitrate) 4219 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4220 return INVALID_OPERATION; 4221 } 4222 4223 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4224 4225 float frameRate; 4226 if (!msg->findFloat("frame-rate", &frameRate)) { 4227 int32_t tmp; 4228 if (!msg->findInt32("frame-rate", &tmp)) { 4229 return INVALID_OPERATION; 4230 } 4231 frameRate = (float)tmp; 4232 } 4233 4234 status_t err = OK; 4235 int32_t intraRefreshMode = 0; 4236 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 4237 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 4238 if (err != OK) { 4239 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4240 err, intraRefreshMode); 4241 return err; 4242 } 4243 } 4244 4245 OMX_VIDEO_PARAM_AVCTYPE h264type; 4246 InitOMXParams(&h264type); 4247 h264type.nPortIndex = kPortIndexOutput; 4248 4249 err = mOMX->getParameter( 4250 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4251 4252 if (err != OK) { 4253 return err; 4254 } 4255 4256 h264type.nAllowedPictureTypes = 4257 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4258 4259 int32_t profile; 4260 if (msg->findInt32("profile", 
            &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile);
        h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level);
    } else {
        // Use largest supported profile for AVC recording if profile is not specified.
        h264type.eProfile = OMX_VIDEO_AVCProfileBaseline;
        for (OMX_VIDEO_AVCPROFILETYPE profile : {
                OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCProfileMain }) {
            if (verifySupportForProfileAndLevel(profile, 0) == OK) {
                h264type.eProfile = profile;
                break;
            }
        }
    }

    ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]",
            asString(h264type.eProfile), asString(h264type.eLevel));

    if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) {
        // Baseline: I/P only, single reference frame, CAVLC entropy coding.
        h264type.nSliceHeaderSpacing = 0;
        h264type.bUseHadamard = OMX_TRUE;
        h264type.nRefFrames = 1;
        h264type.nBFrames = 0;
        h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
        if (h264type.nPFrames == 0) {
            h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
        }
        h264type.nRefIdx10ActiveMinus1 = 0;
        h264type.nRefIdx11ActiveMinus1 = 0;
        h264type.bEntropyCodingCABAC = OMX_FALSE;
        h264type.bWeightedPPrediction = OMX_FALSE;
        h264type.bconstIpred = OMX_FALSE;
        h264type.bDirect8x8Inference = OMX_FALSE;
        h264type.bDirectSpatialTemporal = OMX_FALSE;
        h264type.nCabacInitIdc = 0;
    } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain ||
            h264type.eProfile == OMX_VIDEO_AVCProfileHigh) {
        // Main/High: one B frame, two reference frames, CABAC entropy coding.
        h264type.nSliceHeaderSpacing = 0;
        h264type.bUseHadamard = OMX_TRUE;
        h264type.nRefFrames = 2;
        h264type.nBFrames = 1;
        h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
        h264type.nAllowedPictureTypes =
            OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB;
        h264type.nRefIdx10ActiveMinus1 = 0;
        h264type.nRefIdx11ActiveMinus1 = 0;
        h264type.bEntropyCodingCABAC = OMX_TRUE;
        h264type.bWeightedPPrediction = OMX_TRUE;
        h264type.bconstIpred = OMX_TRUE;
        h264type.bDirect8x8Inference = OMX_TRUE;
        h264type.bDirectSpatialTemporal = OMX_TRUE;
        h264type.nCabacInitIdc = 1;
    }

    if (h264type.nBFrames != 0) {
        h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB;
    }

    h264type.bEnableUEP = OMX_FALSE;
    h264type.bEnableFMO = OMX_FALSE;
    h264type.bEnableASO = OMX_FALSE;
    h264type.bEnableRS = OMX_FALSE;
    h264type.bFrameMBsOnly = OMX_TRUE;
    h264type.bMBAFF = OMX_FALSE;
    h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));

    if (err != OK) {
        return err;
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Configures the component for HEVC encoding: profile/level, key-frame
// interval, then bitrate.
// Requires "bitrate", "i-frame-interval" and "frame-rate" in |msg|.
status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either float or int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_HEVCTYPE hevcType;
    InitOMXParams(&hevcType);
    hevcType.nPortIndex = kPortIndexOutput;

    status_t err = OK;
    err = mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
    if (err != OK) {
        return err;
    }

    int32_t profile;
    if
    (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);
        if (err != OK) {
            return err;
        }

        hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile);
        hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level);
    }
    // TODO: finer control?
    hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;

    err = mOMX->setParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
    if (err != OK) {
        return err;
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Configures the component for VP8/VP9 encoding, including optional temporal
// layering parsed from the "ts-schema" key ("webrtc.vp8.N-layer" or
// "android.generic.N[+M]"). Requires "bitrate" and "frame-rate" in |msg|;
// "i-frame-interval" is optional.
status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    int32_t iFrameInterval = 0;
    size_t tsLayers = 0;
    OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern =
        OMX_VIDEO_VPXTemporalLayerPatternNone;
    // Cumulative per-layer bitrate percentages, indexed by (layer count - 1).
    static const uint32_t kVp8LayerRateAlloction
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = {
        {100, 100, 100},  // 1 layer
        { 60, 100, 100},  // 2 layers {60%, 40%}
        { 40,  60, 100},  // 3 layers {40%, 20%, 40%}
    };
    if (!msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }
    msg->findInt32("i-frame-interval", &iFrameInterval);

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either float or int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    AString tsSchema;
    if (msg->findString("ts-schema", &tsSchema)) {
        unsigned int numLayers = 0;
        unsigned int numBLayers = 0;
        int tags;
        char dummy;
        if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
                && numLayers > 0) {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            tsLayers = numLayers;
        } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
                        &numLayers, &dummy, &numBLayers, &dummy))
                && (tags == 1 || (tags == 3 && dummy == '+'))
                && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            // VPX does not have a concept of B-frames, so just count all layers
            tsLayers = numLayers + numBLayers;
        } else {
            ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
        }
        tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS);
    }

    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
    InitOMXParams(&vp8type);
    vp8type.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
            &vp8type, sizeof(vp8type));

    if (err == OK) {
        if (iFrameInterval > 0) {
            vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;
        }
        vp8type.eTemporalPattern = pattern;
        vp8type.nTemporalLayerCount = tsLayers;
        if (tsLayers > 0) {
            for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) {
                vp8type.nTemporalLayerBitrateRatio[i] =
                    kVp8LayerRateAlloction[tsLayers - 1][i];
            }
        }
        if (bitrateMode == OMX_Video_ControlRateConstant) {
            vp8type.nMinQuantizer = 2;
            vp8type.nMaxQuantizer = 63;
        }

        // The extended VP8 parameters are best-effort; a failure here still
        // falls through to the bitrate configuration below.
        err = mOMX->setParameter(
                mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                &vp8type, sizeof(vp8type));
        if (err != OK) {
            ALOGW("Extended VP8 parameters set failed: %d", err);
        }
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Enumerates the component's advertised profile/level pairs and returns OK
// when |profile| is supported at |level| or above.
status_t ACodec::verifySupportForProfileAndLevel(
        int32_t profile, int32_t level) {
    OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        params.nProfileIndex = index;
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
                sizeof(params));

        if (err != OK) {
            return err;
        }

        int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
        int32_t supportedLevel = static_cast<int32_t>(params.eLevel);

        if (profile == supportedProfile && level <= supportedLevel) {
            return OK;
        }

        // Bound the enumeration so a misbehaving component cannot keep us
        // querying indefinitely.
        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                    mComponentName.c_str(), index,
                    params.eProfile, params.eLevel);
        }
    }
    return ERROR_UNSUPPORTED;
}

// Applies the target bitrate and rate-control mode on the output port.
status_t ACodec::configureBitrate(
        int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
    InitOMXParams(&bitrateType);
    bitrateType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));

    if (err != OK) {
        return err;
    }

    bitrateType.eControlRate = bitrateMode;
    bitrateType.nTargetBitrate = bitrate;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));
}

// Enables resync markers (spacing 256) and disables the remaining
// error-resilience tools on the output port. The whole feature is optional:
// a component that does not expose the parameter is not an error.
status_t ACodec::setupErrorCorrectionParameters() {
    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
    InitOMXParams(&errorCorrectionType);
    errorCorrectionType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));

    if (err != OK) {
        return OK;  // Optional feature. Ignore this failure
    }

    errorCorrectionType.bEnableHEC = OMX_FALSE;
    errorCorrectionType.bEnableResync = OMX_TRUE;
    errorCorrectionType.nResynchMarkerSpacing = 256;
    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
    errorCorrectionType.bEnableRVLC = OMX_FALSE;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));
}

// Applies frame geometry to |portIndex|'s definition; on the input port also
// sets the compression/color format and (when non-negative) the frame rate.
status_t ACodec::setVideoFormatOnPort(
        OMX_U32 portIndex,
        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
        float frameRate) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (portIndex == kPortIndexInput) {
        // XXX Need a (much) better heuristic to compute input buffer sizes.
4588 const size_t X = 64 * 1024; 4589 if (def.nBufferSize < X) { 4590 def.nBufferSize = X; 4591 } 4592 } 4593 4594 if (def.eDomain != OMX_PortDomainVideo) { 4595 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); 4596 return FAILED_TRANSACTION; 4597 } 4598 4599 video_def->nFrameWidth = width; 4600 video_def->nFrameHeight = height; 4601 4602 if (portIndex == kPortIndexInput) { 4603 video_def->eCompressionFormat = compressionFormat; 4604 video_def->eColorFormat = OMX_COLOR_FormatUnused; 4605 if (frameRate >= 0) { 4606 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 4607 } 4608 } 4609 4610 err = mOMX->setParameter( 4611 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4612 4613 return err; 4614} 4615 4616status_t ACodec::initNativeWindow() { 4617 if (mNativeWindow != NULL) { 4618 return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE); 4619 } 4620 4621 mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 4622 return OK; 4623} 4624 4625size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 4626 size_t n = 0; 4627 4628 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4629 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 4630 4631 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4632 ++n; 4633 } 4634 } 4635 4636 return n; 4637} 4638 4639size_t ACodec::countBuffersOwnedByNativeWindow() const { 4640 size_t n = 0; 4641 4642 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 4643 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 4644 4645 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4646 ++n; 4647 } 4648 } 4649 4650 return n; 4651} 4652 4653void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 4654 if (mNativeWindow == NULL) { 4655 return; 4656 } 4657 4658 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 4659 && dequeueBufferFromNativeWindow() != NULL) { 
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}

// Returns true when every buffer on |portIndex| is held either by us or by
// the native window, i.e. none remain with the component.
bool ACodec::allYourBuffersAreBelongToUs(
        OMX_U32 portIndex) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mStatus != BufferInfo::OWNED_BY_US
                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("[%s] Buffer %u on port %u still has status %d",
                    mComponentName.c_str(),
                    info->mBufferID, portIndex, info->mStatus);
            return false;
        }
    }

    return true;
}

// Convenience overload covering both ports.
bool ACodec::allYourBuffersAreBelongToUs() {
    return allYourBuffersAreBelongToUs(kPortIndexInput)
        && allYourBuffersAreBelongToUs(kPortIndexOutput);
}

// Queues |msg| for redelivery after the current state transition completes.
void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}

// Redelivers all deferred messages in order. The queue is copied and cleared
// first, so a handler may defer messages again without processing them here.
void ACodec::processDeferredMessages() {
    List<sp<AMessage> > queue = mDeferredQueue;
    mDeferredQueue.clear();

    List<sp<AMessage> >::iterator it = queue.begin();
    while (it != queue.end()) {
        onMessageReceived(*it++);
    }
}

// static
// Fills in a MediaImage2 plane description for the common 8-bit YUV420
// formats when the component cannot describe its own layout. Returns false
// for formats outside the YUV420 planar/semi-planar family or for geometry
// that is zero or larger than 32768 (overflow guard).
bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // Gralloc YV12: 16-byte-aligned strides, V plane before U.
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12: interleaved chroma, so chroma column increment is 2.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            TRESPASS();
    }
    return true;
}

// static
// Asks the component to describe |describeParams|' color format via the
// describeColorFormat (v1) or describeColorFormat2 vendor extensions,
// falling back to the default YUV420 description when neither is available.
bool ACodec::describeColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        DescribeColorFormat2Params &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat",
&describeColorFormatIndex) == OK) { 4825 DescribeColorFormatParams describeParamsV1(describeParams); 4826 if (omx->getParameter( 4827 node, describeColorFormatIndex, 4828 &describeParamsV1, sizeof(describeParamsV1)) == OK) { 4829 describeParams.initFromV1(describeParamsV1); 4830 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4831 } 4832 } else if (omx->getExtensionIndex( 4833 node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK 4834 && omx->getParameter( 4835 node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) { 4836 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4837 } 4838 4839 return describeDefaultColorFormat(describeParams); 4840} 4841 4842// static 4843bool ACodec::isFlexibleColorFormat( 4844 const sp<IOMX> &omx, IOMX::node_id node, 4845 uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) { 4846 DescribeColorFormat2Params describeParams; 4847 InitOMXParams(&describeParams); 4848 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; 4849 // reasonable dummy values 4850 describeParams.nFrameWidth = 128; 4851 describeParams.nFrameHeight = 128; 4852 describeParams.nStride = 128; 4853 describeParams.nSliceHeight = 128; 4854 describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers; 4855 4856 CHECK(flexibleEquivalent != NULL); 4857 4858 if (!describeColorFormat(omx, node, describeParams)) { 4859 return false; 4860 } 4861 4862 const MediaImage2 &img = describeParams.sMediaImage; 4863 if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) { 4864 if (img.mNumPlanes != 3 4865 || img.mPlane[img.Y].mHorizSubsampling != 1 4866 || img.mPlane[img.Y].mVertSubsampling != 1) { 4867 return false; 4868 } 4869 4870 // YUV 420 4871 if (img.mPlane[img.U].mHorizSubsampling == 2 4872 && img.mPlane[img.U].mVertSubsampling == 2 4873 && img.mPlane[img.V].mHorizSubsampling == 2 4874 && 
                img.mPlane[img.V].mVertSubsampling == 2) {
            // possible flexible YUV420 format
            if (img.mBitDepth <= 8) {
                *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
                return true;
            }
        }
    }
    return false;
}

// Publishes the current format of |portIndex| into |notify| (mime, geometry,
// crop, color info, etc.). NOTE: this function continues past the end of
// this excerpt.
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
    const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) {
        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
        return BAD_VALUE;
    }

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // Raw video: decoder output or encoder input.
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    if (mNativeWindow == NULL) {
                        DescribeColorFormat2Params describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (describeColorFormat(mOMX, mNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        // Default to the full frame when the component does
                        // not report a crop rectangle.
                        if (mOMX->getConfig(
                                    mNode,
                                    (portIndex == kPortIndexOutput ?
                                            OMX_IndexConfigCommonOutputCrop :
                                            OMX_IndexConfigCommonInputCrop),
                                    &rect, sizeof(rect)) != OK) {
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        // Sanity-check the crop against the frame bounds.
                        if (rect.nLeft < 0 ||
                            rect.nTop < 0 ||
                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
                                    rect.nLeft, rect.nTop,
                                    rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                        (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
                    } else {
                        (void)getInputColorAspectsForVideoEncoder(notify);
                        if (mConfigFormat->contains("hdr-static-info")) {
                            (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
                        }
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    // Report the configured WebRTC temporal layering (if any)
                    // back as a "ts-schema" string.
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        AString tsSchema = "none";
                        if (vp8type.eTemporalPattern
                                == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
                            switch (vp8type.nTemporalLayerCount) {
                                case 1:
                                {
                                    tsSchema = "webrtc.vp8.1-layer";
                                    break;
                                }
                                case 2:
                                {
                                    tsSchema = "webrtc.vp8.2-layer";
                                    break;
                                }
                                case 3:
                                {
                                    tsSchema = "webrtc.vp8.3-layer";
                                    break;
                                }
                                default:
                                {
                                    break;
                                }
                            }
                        }
                        notify->setString("ts-schema", tsSchema);
                    }
                    // Fall through to set up mime.
5042 } 5043 5044 default: 5045 { 5046 if (mIsEncoder ^ (portIndex == kPortIndexOutput)) { 5047 // should be CodingUnused 5048 ALOGE("Raw port video compression format is %s(%d)", 5049 asString(videoDef->eCompressionFormat), 5050 videoDef->eCompressionFormat); 5051 return BAD_VALUE; 5052 } 5053 AString mime; 5054 if (GetMimeTypeForVideoCoding( 5055 videoDef->eCompressionFormat, &mime) != OK) { 5056 notify->setString("mime", "application/octet-stream"); 5057 } else { 5058 notify->setString("mime", mime.c_str()); 5059 } 5060 uint32_t intraRefreshPeriod = 0; 5061 if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK 5062 && intraRefreshPeriod > 0) { 5063 notify->setInt32("intra-refresh-period", intraRefreshPeriod); 5064 } 5065 break; 5066 } 5067 } 5068 notify->setInt32("width", videoDef->nFrameWidth); 5069 notify->setInt32("height", videoDef->nFrameHeight); 5070 ALOGV("[%s] %s format is %s", mComponentName.c_str(), 5071 portIndex == kPortIndexInput ? "input" : "output", 5072 notify->debugString().c_str()); 5073 5074 break; 5075 } 5076 5077 case OMX_PortDomainAudio: 5078 { 5079 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 5080 5081 switch ((int)audioDef->eEncoding) { 5082 case OMX_AUDIO_CodingPCM: 5083 { 5084 OMX_AUDIO_PARAM_PCMMODETYPE params; 5085 InitOMXParams(¶ms); 5086 params.nPortIndex = portIndex; 5087 5088 err = mOMX->getParameter( 5089 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5090 if (err != OK) { 5091 return err; 5092 } 5093 5094 if (params.nChannels <= 0 5095 || (params.nChannels != 1 && !params.bInterleaved) 5096 || params.ePCMMode != OMX_AUDIO_PCMModeLinear) { 5097 ALOGE("unsupported PCM port: %u channels%s, %u-bit", 5098 params.nChannels, 5099 params.bInterleaved ? 
" interleaved" : "", 5100 params.nBitPerSample); 5101 return FAILED_TRANSACTION; 5102 } 5103 5104 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 5105 notify->setInt32("channel-count", params.nChannels); 5106 notify->setInt32("sample-rate", params.nSamplingRate); 5107 5108 AudioEncoding encoding = kAudioEncodingPcm16bit; 5109 if (params.eNumData == OMX_NumericalDataUnsigned 5110 && params.nBitPerSample == 8u) { 5111 encoding = kAudioEncodingPcm8bit; 5112 } else if (params.eNumData == OMX_NumericalDataFloat 5113 && params.nBitPerSample == 32u) { 5114 encoding = kAudioEncodingPcmFloat; 5115 } else if (params.nBitPerSample != 16u 5116 || params.eNumData != OMX_NumericalDataSigned) { 5117 ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ", 5118 asString(params.eNumData), params.eNumData, 5119 asString(params.ePCMMode), params.ePCMMode); 5120 return FAILED_TRANSACTION; 5121 } 5122 notify->setInt32("pcm-encoding", encoding); 5123 5124 if (mChannelMaskPresent) { 5125 notify->setInt32("channel-mask", mChannelMask); 5126 } 5127 break; 5128 } 5129 5130 case OMX_AUDIO_CodingAAC: 5131 { 5132 OMX_AUDIO_PARAM_AACPROFILETYPE params; 5133 InitOMXParams(¶ms); 5134 params.nPortIndex = portIndex; 5135 5136 err = mOMX->getParameter( 5137 mNode, OMX_IndexParamAudioAac, ¶ms, sizeof(params)); 5138 if (err != OK) { 5139 return err; 5140 } 5141 5142 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 5143 notify->setInt32("channel-count", params.nChannels); 5144 notify->setInt32("sample-rate", params.nSampleRate); 5145 break; 5146 } 5147 5148 case OMX_AUDIO_CodingAMR: 5149 { 5150 OMX_AUDIO_PARAM_AMRTYPE params; 5151 InitOMXParams(¶ms); 5152 params.nPortIndex = portIndex; 5153 5154 err = mOMX->getParameter( 5155 mNode, OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); 5156 if (err != OK) { 5157 return err; 5158 } 5159 5160 notify->setInt32("channel-count", 1); 5161 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 5162 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 5163 
notify->setInt32("sample-rate", 16000); 5164 } else { 5165 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 5166 notify->setInt32("sample-rate", 8000); 5167 } 5168 break; 5169 } 5170 5171 case OMX_AUDIO_CodingFLAC: 5172 { 5173 OMX_AUDIO_PARAM_FLACTYPE params; 5174 InitOMXParams(¶ms); 5175 params.nPortIndex = portIndex; 5176 5177 err = mOMX->getParameter( 5178 mNode, OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); 5179 if (err != OK) { 5180 return err; 5181 } 5182 5183 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 5184 notify->setInt32("channel-count", params.nChannels); 5185 notify->setInt32("sample-rate", params.nSampleRate); 5186 break; 5187 } 5188 5189 case OMX_AUDIO_CodingMP3: 5190 { 5191 OMX_AUDIO_PARAM_MP3TYPE params; 5192 InitOMXParams(¶ms); 5193 params.nPortIndex = portIndex; 5194 5195 err = mOMX->getParameter( 5196 mNode, OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); 5197 if (err != OK) { 5198 return err; 5199 } 5200 5201 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 5202 notify->setInt32("channel-count", params.nChannels); 5203 notify->setInt32("sample-rate", params.nSampleRate); 5204 break; 5205 } 5206 5207 case OMX_AUDIO_CodingVORBIS: 5208 { 5209 OMX_AUDIO_PARAM_VORBISTYPE params; 5210 InitOMXParams(¶ms); 5211 params.nPortIndex = portIndex; 5212 5213 err = mOMX->getParameter( 5214 mNode, OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); 5215 if (err != OK) { 5216 return err; 5217 } 5218 5219 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 5220 notify->setInt32("channel-count", params.nChannels); 5221 notify->setInt32("sample-rate", params.nSampleRate); 5222 break; 5223 } 5224 5225 case OMX_AUDIO_CodingAndroidAC3: 5226 { 5227 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 5228 InitOMXParams(¶ms); 5229 params.nPortIndex = portIndex; 5230 5231 err = mOMX->getParameter( 5232 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 5233 ¶ms, sizeof(params)); 5234 if (err != OK) { 5235 return err; 5236 } 5237 5238 notify->setString("mime", 
MEDIA_MIMETYPE_AUDIO_AC3); 5239 notify->setInt32("channel-count", params.nChannels); 5240 notify->setInt32("sample-rate", params.nSampleRate); 5241 break; 5242 } 5243 5244 case OMX_AUDIO_CodingAndroidEAC3: 5245 { 5246 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 5247 InitOMXParams(¶ms); 5248 params.nPortIndex = portIndex; 5249 5250 err = mOMX->getParameter( 5251 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 5252 ¶ms, sizeof(params)); 5253 if (err != OK) { 5254 return err; 5255 } 5256 5257 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 5258 notify->setInt32("channel-count", params.nChannels); 5259 notify->setInt32("sample-rate", params.nSampleRate); 5260 break; 5261 } 5262 5263 case OMX_AUDIO_CodingAndroidOPUS: 5264 { 5265 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 5266 InitOMXParams(¶ms); 5267 params.nPortIndex = portIndex; 5268 5269 err = mOMX->getParameter( 5270 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 5271 ¶ms, sizeof(params)); 5272 if (err != OK) { 5273 return err; 5274 } 5275 5276 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 5277 notify->setInt32("channel-count", params.nChannels); 5278 notify->setInt32("sample-rate", params.nSampleRate); 5279 break; 5280 } 5281 5282 case OMX_AUDIO_CodingG711: 5283 { 5284 OMX_AUDIO_PARAM_PCMMODETYPE params; 5285 InitOMXParams(¶ms); 5286 params.nPortIndex = portIndex; 5287 5288 err = mOMX->getParameter( 5289 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5290 if (err != OK) { 5291 return err; 5292 } 5293 5294 const char *mime = NULL; 5295 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 5296 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 5297 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 5298 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 5299 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 5300 mime = MEDIA_MIMETYPE_AUDIO_RAW; 5301 } 5302 notify->setString("mime", mime); 5303 notify->setInt32("channel-count", params.nChannels); 5304 notify->setInt32("sample-rate", 
params.nSamplingRate); 5305 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5306 break; 5307 } 5308 5309 case OMX_AUDIO_CodingGSMFR: 5310 { 5311 OMX_AUDIO_PARAM_PCMMODETYPE params; 5312 InitOMXParams(¶ms); 5313 params.nPortIndex = portIndex; 5314 5315 err = mOMX->getParameter( 5316 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5317 if (err != OK) { 5318 return err; 5319 } 5320 5321 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5322 notify->setInt32("channel-count", params.nChannels); 5323 notify->setInt32("sample-rate", params.nSamplingRate); 5324 break; 5325 } 5326 5327 default: 5328 ALOGE("Unsupported audio coding: %s(%d)\n", 5329 asString(audioDef->eEncoding), audioDef->eEncoding); 5330 return BAD_TYPE; 5331 } 5332 break; 5333 } 5334 5335 default: 5336 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5337 return BAD_TYPE; 5338 } 5339 5340 return OK; 5341} 5342 5343void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5344 // aspects are normally communicated in ColorAspects 5345 int32_t range, standard, transfer; 5346 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5347 5348 // if some aspects are unspecified, use dataspace fields 5349 if (range != 0) { 5350 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5351 } 5352 if (standard != 0) { 5353 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5354 } 5355 if (transfer != 0) { 5356 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5357 } 5358 5359 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5360 if (range != 0) { 5361 mOutputFormat->setInt32("color-range", range); 5362 } 5363 if (standard != 0) { 5364 mOutputFormat->setInt32("color-standard", standard); 5365 } 5366 if (transfer != 0) { 5367 mOutputFormat->setInt32("color-transfer", transfer); 5368 } 5369 5370 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5371 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5372 dataSpace, 5373 aspects.mRange, asString(aspects.mRange), 5374 aspects.mPrimaries, asString(aspects.mPrimaries), 5375 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5376 aspects.mTransfer, asString(aspects.mTransfer), 5377 range, asString((ColorRange)range), 5378 standard, asString((ColorStandard)standard), 5379 transfer, asString((ColorTransfer)transfer)); 5380} 5381 5382void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5383 // store new output format, at the same time mark that this is no longer the first frame 5384 mOutputFormat = mBaseOutputFormat->dup(); 5385 5386 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5387 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5388 return; 5389 } 5390 5391 if (expectedFormat != NULL) { 5392 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5393 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5394 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5395 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5396 mComponentName.c_str(), 5397 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5398 } 5399 } 5400 5401 if (!mIsVideo && !mIsEncoder) { 5402 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5403 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5404 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5405 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5406 5407 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5408 if (mConverter[kPortIndexOutput] != NULL) { 5409 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5410 } 5411 } 5412 5413 if (mTunneled) { 5414 sendFormatChange(); 5415 } 5416} 5417 5418void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> ¬ify) { 5419 AString mime; 5420 CHECK(mOutputFormat->findString("mime", &mime)); 5421 5422 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) { 5423 // notify renderer of the crop change and dataspace change 5424 // NOTE: native window uses extended right-bottom coordinate 5425 int32_t left, top, right, bottom; 5426 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) { 5427 notify->setRect("crop", left, top, right + 1, bottom + 1); 5428 } 5429 5430 int32_t dataSpace; 5431 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) { 5432 notify->setInt32("dataspace", dataSpace); 5433 } 5434 } 5435} 5436 5437void ACodec::sendFormatChange() { 5438 AString mime; 5439 CHECK(mOutputFormat->findString("mime", &mime)); 5440 5441 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 5442 int32_t channelCount; 5443 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 5444 if (mSkipCutBuffer != NULL) { 5445 size_t prevbufsize = mSkipCutBuffer->size(); 5446 if (prevbufsize != 0) { 5447 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 5448 } 5449 } 5450 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 5451 } 5452 5453 sp<AMessage> notify = mNotify->dup(); 5454 notify->setInt32("what", kWhatOutputFormatChanged); 5455 notify->setMessage("format", mOutputFormat); 5456 notify->post(); 5457 5458 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 5459 mLastOutputFormat = mOutputFormat; 5460} 5461 5462void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 5463 sp<AMessage> notify = mNotify->dup(); 5464 notify->setInt32("what", CodecBase::kWhatError); 5465 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 5466 5467 if (internalError == UNKNOWN_ERROR) { // find better error code 5468 const status_t omxStatus = 
statusFromOMXError(error); 5469 if (omxStatus != 0) { 5470 internalError = omxStatus; 5471 } else { 5472 ALOGW("Invalid OMX error %#x", error); 5473 } 5474 } 5475 5476 mFatalError = true; 5477 5478 notify->setInt32("err", internalError); 5479 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 5480 notify->post(); 5481} 5482 5483//////////////////////////////////////////////////////////////////////////////// 5484 5485ACodec::PortDescription::PortDescription() { 5486} 5487 5488status_t ACodec::requestIDRFrame() { 5489 if (!mIsEncoder) { 5490 return ERROR_UNSUPPORTED; 5491 } 5492 5493 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5494 InitOMXParams(¶ms); 5495 5496 params.nPortIndex = kPortIndexOutput; 5497 params.IntraRefreshVOP = OMX_TRUE; 5498 5499 return mOMX->setConfig( 5500 mNode, 5501 OMX_IndexConfigVideoIntraVOPRefresh, 5502 ¶ms, 5503 sizeof(params)); 5504} 5505 5506void ACodec::PortDescription::addBuffer( 5507 IOMX::buffer_id id, const sp<ABuffer> &buffer, 5508 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) { 5509 mBufferIDs.push_back(id); 5510 mBuffers.push_back(buffer); 5511 mHandles.push_back(handle); 5512 mMemRefs.push_back(memRef); 5513} 5514 5515size_t ACodec::PortDescription::countBuffers() { 5516 return mBufferIDs.size(); 5517} 5518 5519IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5520 return mBufferIDs.itemAt(index); 5521} 5522 5523sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5524 return mBuffers.itemAt(index); 5525} 5526 5527sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const { 5528 return mHandles.itemAt(index); 5529} 5530 5531sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const { 5532 return mMemRefs.itemAt(index); 5533} 5534 5535//////////////////////////////////////////////////////////////////////////////// 5536 5537ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5538 : AState(parentState), 5539 
mCodec(codec) { 5540} 5541 5542ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5543 OMX_U32 /* portIndex */) { 5544 return KEEP_BUFFERS; 5545} 5546 5547bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5548 switch (msg->what()) { 5549 case kWhatInputBufferFilled: 5550 { 5551 onInputBufferFilled(msg); 5552 break; 5553 } 5554 5555 case kWhatOutputBufferDrained: 5556 { 5557 onOutputBufferDrained(msg); 5558 break; 5559 } 5560 5561 case ACodec::kWhatOMXMessageList: 5562 { 5563 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5564 } 5565 5566 case ACodec::kWhatOMXMessageItem: 5567 { 5568 // no need to check as we already did it for kWhatOMXMessageList 5569 return onOMXMessage(msg); 5570 } 5571 5572 case ACodec::kWhatOMXMessage: 5573 { 5574 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5575 } 5576 5577 case ACodec::kWhatSetSurface: 5578 { 5579 sp<AReplyToken> replyID; 5580 CHECK(msg->senderAwaitsResponse(&replyID)); 5581 5582 sp<RefBase> obj; 5583 CHECK(msg->findObject("surface", &obj)); 5584 5585 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5586 5587 sp<AMessage> response = new AMessage; 5588 response->setInt32("err", err); 5589 response->postReply(replyID); 5590 break; 5591 } 5592 5593 case ACodec::kWhatCreateInputSurface: 5594 case ACodec::kWhatSetInputSurface: 5595 case ACodec::kWhatSignalEndOfInputStream: 5596 { 5597 // This may result in an app illegal state exception. 5598 ALOGE("Message 0x%x was not handled", msg->what()); 5599 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5600 return true; 5601 } 5602 5603 case ACodec::kWhatOMXDied: 5604 { 5605 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 
5606 ALOGE("OMX/mediaserver died, signalling error!"); 5607 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5608 break; 5609 } 5610 5611 case ACodec::kWhatReleaseCodecInstance: 5612 { 5613 ALOGI("[%s] forcing the release of codec", 5614 mCodec->mComponentName.c_str()); 5615 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5616 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5617 mCodec->mComponentName.c_str(), err); 5618 sp<AMessage> notify = mCodec->mNotify->dup(); 5619 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5620 notify->post(); 5621 break; 5622 } 5623 5624 default: 5625 return false; 5626 } 5627 5628 return true; 5629} 5630 5631bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5632 // there is a possibility that this is an outstanding message for a 5633 // codec that we have already destroyed 5634 if (mCodec->mNode == 0) { 5635 ALOGI("ignoring message as already freed component: %s", 5636 msg->debugString().c_str()); 5637 return false; 5638 } 5639 5640 IOMX::node_id nodeID; 5641 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5642 if (nodeID != mCodec->mNode) { 5643 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5644 return false; 5645 } 5646 return true; 5647} 5648 5649bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5650 sp<RefBase> obj; 5651 CHECK(msg->findObject("messages", &obj)); 5652 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5653 5654 bool receivedRenderedEvents = false; 5655 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5656 it != msgList->getList().cend(); ++it) { 5657 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 5658 mCodec->handleMessage(*it); 5659 int32_t type; 5660 CHECK((*it)->findInt32("type", &type)); 5661 if (type == omx_message::FRAME_RENDERED) { 5662 receivedRenderedEvents = true; 5663 } 5664 } 5665 5666 if (receivedRenderedEvents) { 5667 // NOTE: all buffers are 
rendered in this case 5668 mCodec->notifyOfRenderedFrames(); 5669 } 5670 return true; 5671} 5672 5673bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5674 int32_t type; 5675 CHECK(msg->findInt32("type", &type)); 5676 5677 switch (type) { 5678 case omx_message::EVENT: 5679 { 5680 int32_t event, data1, data2; 5681 CHECK(msg->findInt32("event", &event)); 5682 CHECK(msg->findInt32("data1", &data1)); 5683 CHECK(msg->findInt32("data2", &data2)); 5684 5685 if (event == OMX_EventCmdComplete 5686 && data1 == OMX_CommandFlush 5687 && data2 == (int32_t)OMX_ALL) { 5688 // Use of this notification is not consistent across 5689 // implementations. We'll drop this notification and rely 5690 // on flush-complete notifications on the individual port 5691 // indices instead. 5692 5693 return true; 5694 } 5695 5696 return onOMXEvent( 5697 static_cast<OMX_EVENTTYPE>(event), 5698 static_cast<OMX_U32>(data1), 5699 static_cast<OMX_U32>(data2)); 5700 } 5701 5702 case omx_message::EMPTY_BUFFER_DONE: 5703 { 5704 IOMX::buffer_id bufferID; 5705 int32_t fenceFd; 5706 5707 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5708 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5709 5710 return onOMXEmptyBufferDone(bufferID, fenceFd); 5711 } 5712 5713 case omx_message::FILL_BUFFER_DONE: 5714 { 5715 IOMX::buffer_id bufferID; 5716 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5717 5718 int32_t rangeOffset, rangeLength, flags, fenceFd; 5719 int64_t timeUs; 5720 5721 CHECK(msg->findInt32("range_offset", &rangeOffset)); 5722 CHECK(msg->findInt32("range_length", &rangeLength)); 5723 CHECK(msg->findInt32("flags", &flags)); 5724 CHECK(msg->findInt64("timestamp", &timeUs)); 5725 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5726 5727 return onOMXFillBufferDone( 5728 bufferID, 5729 (size_t)rangeOffset, (size_t)rangeLength, 5730 (OMX_U32)flags, 5731 timeUs, 5732 fenceFd); 5733 } 5734 5735 case omx_message::FRAME_RENDERED: 5736 { 5737 int64_t mediaTimeUs, systemNano; 5738 5739 
CHECK(msg->findInt64("media_time_us", &mediaTimeUs)); 5740 CHECK(msg->findInt64("system_nano", &systemNano)); 5741 5742 return onOMXFrameRendered( 5743 mediaTimeUs, systemNano); 5744 } 5745 5746 default: 5747 ALOGE("Unexpected message type: %d", type); 5748 return false; 5749 } 5750} 5751 5752bool ACodec::BaseState::onOMXFrameRendered( 5753 int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) { 5754 // ignore outside of Executing and PortSettingsChanged states 5755 return true; 5756} 5757 5758bool ACodec::BaseState::onOMXEvent( 5759 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5760 if (event == OMX_EventDataSpaceChanged) { 5761 ColorAspects aspects; 5762 aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF); 5763 aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF); 5764 aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF); 5765 aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF); 5766 5767 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects); 5768 return true; 5769 } 5770 5771 if (event != OMX_EventError) { 5772 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)", 5773 mCodec->mComponentName.c_str(), event, data1, data2); 5774 5775 return false; 5776 } 5777 5778 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1); 5779 5780 // verify OMX component sends back an error we expect. 
5781 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 5782 if (!isOMXError(omxError)) { 5783 ALOGW("Invalid OMX error %#x", omxError); 5784 omxError = OMX_ErrorUndefined; 5785 } 5786 mCodec->signalError(omxError); 5787 5788 return true; 5789} 5790 5791bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { 5792 ALOGV("[%s] onOMXEmptyBufferDone %u", 5793 mCodec->mComponentName.c_str(), bufferID); 5794 5795 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5796 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5797 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5798 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5799 mCodec->dumpBuffers(kPortIndexInput); 5800 if (fenceFd >= 0) { 5801 ::close(fenceFd); 5802 } 5803 return false; 5804 } 5805 info->mStatus = BufferInfo::OWNED_BY_US; 5806 5807 // input buffers cannot take fences, so wait for any fence now 5808 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); 5809 fenceFd = -1; 5810 5811 // still save fence for completeness 5812 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); 5813 5814 // We're in "store-metadata-in-buffers" mode, the underlying 5815 // OMX component had access to data that's implicitly refcounted 5816 // by this "MediaBuffer" object. Now that the OMX component has 5817 // told us that it's done with the input buffer, we can decrement 5818 // the mediaBuffer's reference count. 
5819 info->mData->setMediaBufferBase(NULL); 5820 5821 PortMode mode = getPortMode(kPortIndexInput); 5822 5823 switch (mode) { 5824 case KEEP_BUFFERS: 5825 break; 5826 5827 case RESUBMIT_BUFFERS: 5828 postFillThisBuffer(info); 5829 break; 5830 5831 case FREE_BUFFERS: 5832 default: 5833 ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers"); 5834 return false; 5835 } 5836 5837 return true; 5838} 5839 5840void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 5841 if (mCodec->mPortEOS[kPortIndexInput]) { 5842 return; 5843 } 5844 5845 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 5846 5847 sp<AMessage> notify = mCodec->mNotify->dup(); 5848 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 5849 notify->setInt32("buffer-id", info->mBufferID); 5850 5851 info->mData->meta()->clear(); 5852 notify->setBuffer("buffer", info->mData); 5853 5854 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); 5855 reply->setInt32("buffer-id", info->mBufferID); 5856 5857 notify->setMessage("reply", reply); 5858 5859 notify->post(); 5860 5861 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 5862} 5863 5864void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 5865 IOMX::buffer_id bufferID; 5866 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5867 sp<ABuffer> buffer; 5868 int32_t err = OK; 5869 bool eos = false; 5870 PortMode mode = getPortMode(kPortIndexInput); 5871 5872 if (!msg->findBuffer("buffer", &buffer)) { 5873 /* these are unfilled buffers returned by client */ 5874 CHECK(msg->findInt32("err", &err)); 5875 5876 if (err == OK) { 5877 /* buffers with no errors are returned on MediaCodec.flush */ 5878 mode = KEEP_BUFFERS; 5879 } else { 5880 ALOGV("[%s] saw error %d instead of an input buffer", 5881 mCodec->mComponentName.c_str(), err); 5882 eos = true; 5883 } 5884 5885 buffer.clear(); 5886 } 5887 5888 int32_t tmp; 5889 if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { 5890 eos = true; 5891 
err = ERROR_END_OF_STREAM; 5892 } 5893 5894 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5895 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5896 if (status != BufferInfo::OWNED_BY_UPSTREAM) { 5897 ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID); 5898 mCodec->dumpBuffers(kPortIndexInput); 5899 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5900 return; 5901 } 5902 5903 info->mStatus = BufferInfo::OWNED_BY_US; 5904 5905 switch (mode) { 5906 case KEEP_BUFFERS: 5907 { 5908 if (eos) { 5909 if (!mCodec->mPortEOS[kPortIndexInput]) { 5910 mCodec->mPortEOS[kPortIndexInput] = true; 5911 mCodec->mInputEOSResult = err; 5912 } 5913 } 5914 break; 5915 } 5916 5917 case RESUBMIT_BUFFERS: 5918 { 5919 if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) { 5920 // Do not send empty input buffer w/o EOS to the component. 5921 if (buffer->size() == 0 && !eos) { 5922 postFillThisBuffer(info); 5923 break; 5924 } 5925 5926 int64_t timeUs; 5927 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 5928 5929 OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; 5930 5931 MetadataBufferType metaType = mCodec->mInputMetadataType; 5932 int32_t isCSD = 0; 5933 if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) { 5934 if (mCodec->mIsLegacyVP9Decoder) { 5935 ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data", 5936 mCodec->mComponentName.c_str(), bufferID); 5937 postFillThisBuffer(info); 5938 break; 5939 } 5940 flags |= OMX_BUFFERFLAG_CODECCONFIG; 5941 metaType = kMetadataBufferTypeInvalid; 5942 } 5943 5944 if (eos) { 5945 flags |= OMX_BUFFERFLAG_EOS; 5946 } 5947 5948 if (buffer != info->mCodecData) { 5949 ALOGV("[%s] Needs to copy input data for buffer %u. 
(%p != %p)", 5950 mCodec->mComponentName.c_str(), 5951 bufferID, 5952 buffer.get(), info->mCodecData.get()); 5953 5954 sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput]; 5955 if (converter == NULL || isCSD) { 5956 converter = getCopyConverter(); 5957 } 5958 status_t err = converter->convert(buffer, info->mCodecData); 5959 if (err != OK) { 5960 mCodec->signalError(OMX_ErrorUndefined, err); 5961 return; 5962 } 5963 } 5964 5965 if (flags & OMX_BUFFERFLAG_CODECCONFIG) { 5966 ALOGV("[%s] calling emptyBuffer %u w/ codec specific data", 5967 mCodec->mComponentName.c_str(), bufferID); 5968 } else if (flags & OMX_BUFFERFLAG_EOS) { 5969 ALOGV("[%s] calling emptyBuffer %u w/ EOS", 5970 mCodec->mComponentName.c_str(), bufferID); 5971 } else { 5972#if TRACK_BUFFER_TIMING 5973 ALOGI("[%s] calling emptyBuffer %u w/ time %lld us", 5974 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5975#else 5976 ALOGV("[%s] calling emptyBuffer %u w/ time %lld us", 5977 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5978#endif 5979 } 5980 5981#if TRACK_BUFFER_TIMING 5982 ACodec::BufferStats stats; 5983 stats.mEmptyBufferTimeUs = ALooper::GetNowUs(); 5984 stats.mFillBufferDoneTimeUs = -1ll; 5985 mCodec->mBufferStats.add(timeUs, stats); 5986#endif 5987 5988 if (mCodec->storingMetadataInDecodedBuffers()) { 5989 // try to submit an output buffer for each input buffer 5990 PortMode outputMode = getPortMode(kPortIndexOutput); 5991 5992 ALOGV("MetadataBuffersToSubmit=%u portMode=%s", 5993 mCodec->mMetadataBuffersToSubmit, 5994 (outputMode == FREE_BUFFERS ? "FREE" : 5995 outputMode == KEEP_BUFFERS ? 
"KEEP" : "RESUBMIT")); 5996 if (outputMode == RESUBMIT_BUFFERS) { 5997 mCodec->submitOutputMetadataBuffer(); 5998 } 5999 } 6000 info->checkReadFence("onInputBufferFilled"); 6001 6002 status_t err2 = OK; 6003 switch (metaType) { 6004 case kMetadataBufferTypeInvalid: 6005 break; 6006#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 6007 case kMetadataBufferTypeNativeHandleSource: 6008 if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) { 6009 VideoNativeHandleMetadata *vnhmd = 6010 (VideoNativeHandleMetadata*)info->mCodecData->base(); 6011 err2 = mCodec->mOMX->updateNativeHandleInMeta( 6012 mCodec->mNode, kPortIndexInput, 6013 NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */), 6014 bufferID); 6015 } 6016 break; 6017 case kMetadataBufferTypeANWBuffer: 6018 if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) { 6019 VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base(); 6020 err2 = mCodec->mOMX->updateGraphicBufferInMeta( 6021 mCodec->mNode, kPortIndexInput, 6022 new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */), 6023 bufferID); 6024 } 6025 break; 6026#endif 6027 default: 6028 ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode", 6029 asString(metaType), info->mCodecData->size(), 6030 sizeof(buffer_handle_t) * 8); 6031 err2 = ERROR_UNSUPPORTED; 6032 break; 6033 } 6034 6035 if (err2 == OK) { 6036 err2 = mCodec->mOMX->emptyBuffer( 6037 mCodec->mNode, 6038 bufferID, 6039 0, 6040 info->mCodecData->size(), 6041 flags, 6042 timeUs, 6043 info->mFenceFd); 6044 } 6045 info->mFenceFd = -1; 6046 if (err2 != OK) { 6047 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 6048 return; 6049 } 6050 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6051 6052 if (!eos && err == OK) { 6053 getMoreInputDataIfPossible(); 6054 } else { 6055 ALOGV("[%s] Signalled EOS (%d) on the input port", 6056 mCodec->mComponentName.c_str(), err); 6057 6058 mCodec->mPortEOS[kPortIndexInput] = true; 6059 
mCodec->mInputEOSResult = err; 6060 } 6061 } else if (!mCodec->mPortEOS[kPortIndexInput]) { 6062 if (err != OK && err != ERROR_END_OF_STREAM) { 6063 ALOGV("[%s] Signalling EOS on the input port due to error %d", 6064 mCodec->mComponentName.c_str(), err); 6065 } else { 6066 ALOGV("[%s] Signalling EOS on the input port", 6067 mCodec->mComponentName.c_str()); 6068 } 6069 6070 ALOGV("[%s] calling emptyBuffer %u signalling EOS", 6071 mCodec->mComponentName.c_str(), bufferID); 6072 6073 info->checkReadFence("onInputBufferFilled"); 6074 status_t err2 = mCodec->mOMX->emptyBuffer( 6075 mCodec->mNode, 6076 bufferID, 6077 0, 6078 0, 6079 OMX_BUFFERFLAG_EOS, 6080 0, 6081 info->mFenceFd); 6082 info->mFenceFd = -1; 6083 if (err2 != OK) { 6084 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 6085 return; 6086 } 6087 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6088 6089 mCodec->mPortEOS[kPortIndexInput] = true; 6090 mCodec->mInputEOSResult = err; 6091 } 6092 break; 6093 } 6094 6095 case FREE_BUFFERS: 6096 break; 6097 6098 default: 6099 ALOGE("invalid port mode: %d", mode); 6100 break; 6101 } 6102} 6103 6104void ACodec::BaseState::getMoreInputDataIfPossible() { 6105 if (mCodec->mPortEOS[kPortIndexInput]) { 6106 return; 6107 } 6108 6109 BufferInfo *eligible = NULL; 6110 6111 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 6112 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6113 6114#if 0 6115 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { 6116 // There's already a "read" pending. 
6117 return; 6118 } 6119#endif 6120 6121 if (info->mStatus == BufferInfo::OWNED_BY_US) { 6122 eligible = info; 6123 } 6124 } 6125 6126 if (eligible == NULL) { 6127 return; 6128 } 6129 6130 postFillThisBuffer(eligible); 6131} 6132 6133bool ACodec::BaseState::onOMXFillBufferDone( 6134 IOMX::buffer_id bufferID, 6135 size_t rangeOffset, size_t rangeLength, 6136 OMX_U32 flags, 6137 int64_t timeUs, 6138 int fenceFd) { 6139 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x", 6140 mCodec->mComponentName.c_str(), bufferID, timeUs, flags); 6141 6142 ssize_t index; 6143 status_t err= OK; 6144 6145#if TRACK_BUFFER_TIMING 6146 index = mCodec->mBufferStats.indexOfKey(timeUs); 6147 if (index >= 0) { 6148 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index); 6149 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs(); 6150 6151 ALOGI("frame PTS %lld: %lld", 6152 timeUs, 6153 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs); 6154 6155 mCodec->mBufferStats.removeItemsAt(index); 6156 stats = NULL; 6157 } 6158#endif 6159 6160 BufferInfo *info = 6161 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 6162 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 6163 if (status != BufferInfo::OWNED_BY_COMPONENT) { 6164 ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 6165 mCodec->dumpBuffers(kPortIndexOutput); 6166 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6167 if (fenceFd >= 0) { 6168 ::close(fenceFd); 6169 } 6170 return true; 6171 } 6172 6173 info->mDequeuedAt = ++mCodec->mDequeueCounter; 6174 info->mStatus = BufferInfo::OWNED_BY_US; 6175 6176 if (info->mRenderInfo != NULL) { 6177 // The fence for an emptied buffer must have signaled, but there still could be queued 6178 // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these, 6179 // as we will soon requeue this buffer to the surface. 
While in theory we could still keep 6180 // track of buffers that are requeued to the surface, it is better to add support to the 6181 // buffer-queue to notify us of released buffers and their fences (in the future). 6182 mCodec->notifyOfRenderedFrames(true /* dropIncomplete */); 6183 } 6184 6185 // byte buffers cannot take fences, so wait for any fence now 6186 if (mCodec->mNativeWindow == NULL) { 6187 (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone"); 6188 fenceFd = -1; 6189 } 6190 info->setReadFence(fenceFd, "onOMXFillBufferDone"); 6191 6192 PortMode mode = getPortMode(kPortIndexOutput); 6193 6194 switch (mode) { 6195 case KEEP_BUFFERS: 6196 break; 6197 6198 case RESUBMIT_BUFFERS: 6199 { 6200 if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS) 6201 || mCodec->mPortEOS[kPortIndexOutput])) { 6202 ALOGV("[%s] calling fillBuffer %u", 6203 mCodec->mComponentName.c_str(), info->mBufferID); 6204 6205 err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 6206 info->mFenceFd = -1; 6207 if (err != OK) { 6208 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6209 return true; 6210 } 6211 6212 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6213 break; 6214 } 6215 6216 sp<AMessage> reply = 6217 new AMessage(kWhatOutputBufferDrained, mCodec); 6218 6219 if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) { 6220 // pretend that output format has changed on the first frame (we used to do this) 6221 if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) { 6222 mCodec->onOutputFormatChanged(mCodec->mOutputFormat); 6223 } 6224 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 6225 mCodec->sendFormatChange(); 6226 } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) { 6227 // If potentially rendering onto a surface, always save key format data (crop & 6228 // data space) so that we can set it if and once the buffer is rendered. 
6229 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 6230 } 6231 6232 if (mCodec->usingMetadataOnEncoderOutput()) { 6233 native_handle_t *handle = NULL; 6234 VideoNativeHandleMetadata &nativeMeta = 6235 *(VideoNativeHandleMetadata *)info->mData->data(); 6236 if (info->mData->size() >= sizeof(nativeMeta) 6237 && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) { 6238#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 6239 // handle is only valid on 32-bit/mediaserver process 6240 handle = NULL; 6241#else 6242 handle = (native_handle_t *)nativeMeta.pHandle; 6243#endif 6244 } 6245 info->mData->meta()->setPointer("handle", handle); 6246 info->mData->meta()->setInt32("rangeOffset", rangeOffset); 6247 info->mData->meta()->setInt32("rangeLength", rangeLength); 6248 } else if (info->mData == info->mCodecData) { 6249 info->mData->setRange(rangeOffset, rangeLength); 6250 } else { 6251 info->mCodecData->setRange(rangeOffset, rangeLength); 6252 // in this case we know that mConverter is not null 6253 status_t err = mCodec->mConverter[kPortIndexOutput]->convert( 6254 info->mCodecData, info->mData); 6255 if (err != OK) { 6256 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6257 return true; 6258 } 6259 } 6260#if 0 6261 if (mCodec->mNativeWindow == NULL) { 6262 if (IsIDR(info->mData)) { 6263 ALOGI("IDR frame"); 6264 } 6265 } 6266#endif 6267 6268 if (mCodec->mSkipCutBuffer != NULL) { 6269 mCodec->mSkipCutBuffer->submit(info->mData); 6270 } 6271 info->mData->meta()->setInt64("timeUs", timeUs); 6272 6273 sp<AMessage> notify = mCodec->mNotify->dup(); 6274 notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); 6275 notify->setInt32("buffer-id", info->mBufferID); 6276 notify->setBuffer("buffer", info->mData); 6277 notify->setInt32("flags", flags); 6278 6279 reply->setInt32("buffer-id", info->mBufferID); 6280 6281 notify->setMessage("reply", reply); 6282 6283 notify->post(); 6284 6285 info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM; 
6286 6287 if (flags & OMX_BUFFERFLAG_EOS) { 6288 ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str()); 6289 6290 sp<AMessage> notify = mCodec->mNotify->dup(); 6291 notify->setInt32("what", CodecBase::kWhatEOS); 6292 notify->setInt32("err", mCodec->mInputEOSResult); 6293 notify->post(); 6294 6295 mCodec->mPortEOS[kPortIndexOutput] = true; 6296 } 6297 break; 6298 } 6299 6300 case FREE_BUFFERS: 6301 err = mCodec->freeBuffer(kPortIndexOutput, index); 6302 if (err != OK) { 6303 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6304 return true; 6305 } 6306 break; 6307 6308 default: 6309 ALOGE("Invalid port mode: %d", mode); 6310 return false; 6311 } 6312 6313 return true; 6314} 6315 6316void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { 6317 IOMX::buffer_id bufferID; 6318 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 6319 ssize_t index; 6320 BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 6321 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 6322 if (status != BufferInfo::OWNED_BY_DOWNSTREAM) { 6323 ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 6324 mCodec->dumpBuffers(kPortIndexOutput); 6325 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6326 return; 6327 } 6328 6329 android_native_rect_t crop; 6330 if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom) 6331 && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) { 6332 mCodec->mLastNativeWindowCrop = crop; 6333 status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop); 6334 ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err); 6335 } 6336 6337 int32_t dataSpace; 6338 if (msg->findInt32("dataspace", &dataSpace) 6339 && dataSpace != mCodec->mLastNativeWindowDataSpace) { 6340 status_t err = native_window_set_buffers_data_space( 6341 mCodec->mNativeWindow.get(), (android_dataspace)dataSpace); 6342 
mCodec->mLastNativeWindowDataSpace = dataSpace; 6343 ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err); 6344 } 6345 6346 int32_t render; 6347 if (mCodec->mNativeWindow != NULL 6348 && msg->findInt32("render", &render) && render != 0 6349 && info->mData != NULL && info->mData->size() != 0) { 6350 ATRACE_NAME("render"); 6351 // The client wants this buffer to be rendered. 6352 6353 // save buffers sent to the surface so we can get render time when they return 6354 int64_t mediaTimeUs = -1; 6355 info->mData->meta()->findInt64("timeUs", &mediaTimeUs); 6356 if (mediaTimeUs >= 0) { 6357 mCodec->mRenderTracker.onFrameQueued( 6358 mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd))); 6359 } 6360 6361 int64_t timestampNs = 0; 6362 if (!msg->findInt64("timestampNs", ×tampNs)) { 6363 // use media timestamp if client did not request a specific render timestamp 6364 if (info->mData->meta()->findInt64("timeUs", ×tampNs)) { 6365 ALOGV("using buffer PTS of %lld", (long long)timestampNs); 6366 timestampNs *= 1000; 6367 } 6368 } 6369 6370 status_t err; 6371 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); 6372 ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err); 6373 6374 info->checkReadFence("onOutputBufferDrained before queueBuffer"); 6375 err = mCodec->mNativeWindow->queueBuffer( 6376 mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 6377 info->mFenceFd = -1; 6378 if (err == OK) { 6379 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 6380 } else { 6381 ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err); 6382 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6383 info->mStatus = BufferInfo::OWNED_BY_US; 6384 // keeping read fence as write fence to avoid clobbering 6385 info->mIsReadFence = false; 6386 } 6387 } else { 6388 if (mCodec->mNativeWindow != NULL && 6389 (info->mData == NULL || info->mData->size() != 0)) { 6390 // move read fence into 
write fence to avoid clobbering 6391 info->mIsReadFence = false; 6392 ATRACE_NAME("frame-drop"); 6393 } 6394 info->mStatus = BufferInfo::OWNED_BY_US; 6395 } 6396 6397 PortMode mode = getPortMode(kPortIndexOutput); 6398 6399 switch (mode) { 6400 case KEEP_BUFFERS: 6401 { 6402 // XXX fishy, revisit!!! What about the FREE_BUFFERS case below? 6403 6404 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6405 // We cannot resubmit the buffer we just rendered, dequeue 6406 // the spare instead. 6407 6408 info = mCodec->dequeueBufferFromNativeWindow(); 6409 } 6410 break; 6411 } 6412 6413 case RESUBMIT_BUFFERS: 6414 { 6415 if (!mCodec->mPortEOS[kPortIndexOutput]) { 6416 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6417 // We cannot resubmit the buffer we just rendered, dequeue 6418 // the spare instead. 6419 6420 info = mCodec->dequeueBufferFromNativeWindow(); 6421 } 6422 6423 if (info != NULL) { 6424 ALOGV("[%s] calling fillBuffer %u", 6425 mCodec->mComponentName.c_str(), info->mBufferID); 6426 info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS"); 6427 status_t err = mCodec->mOMX->fillBuffer( 6428 mCodec->mNode, info->mBufferID, info->mFenceFd); 6429 info->mFenceFd = -1; 6430 if (err == OK) { 6431 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6432 } else { 6433 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6434 } 6435 } 6436 } 6437 break; 6438 } 6439 6440 case FREE_BUFFERS: 6441 { 6442 status_t err = mCodec->freeBuffer(kPortIndexOutput, index); 6443 if (err != OK) { 6444 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6445 } 6446 break; 6447 } 6448 6449 default: 6450 ALOGE("Invalid port mode: %d", mode); 6451 return; 6452 } 6453} 6454 6455//////////////////////////////////////////////////////////////////////////////// 6456 6457ACodec::UninitializedState::UninitializedState(ACodec *codec) 6458 : BaseState(codec) { 6459} 6460 6461void ACodec::UninitializedState::stateEntered() { 6462 ALOGV("Now 
uninitialized"); 6463 6464 if (mDeathNotifier != NULL) { 6465 mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier); 6466 mDeathNotifier.clear(); 6467 } 6468 6469 mCodec->mUsingNativeWindow = false; 6470 mCodec->mNativeWindow.clear(); 6471 mCodec->mNativeWindowUsageBits = 0; 6472 mCodec->mNode = 0; 6473 mCodec->mOMX.clear(); 6474 mCodec->mQuirks = 0; 6475 mCodec->mFlags = 0; 6476 mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; 6477 mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; 6478 mCodec->mConverter[0].clear(); 6479 mCodec->mConverter[1].clear(); 6480 mCodec->mComponentName.clear(); 6481} 6482 6483bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 6484 bool handled = false; 6485 6486 switch (msg->what()) { 6487 case ACodec::kWhatSetup: 6488 { 6489 onSetup(msg); 6490 6491 handled = true; 6492 break; 6493 } 6494 6495 case ACodec::kWhatAllocateComponent: 6496 { 6497 onAllocateComponent(msg); 6498 handled = true; 6499 break; 6500 } 6501 6502 case ACodec::kWhatShutdown: 6503 { 6504 int32_t keepComponentAllocated; 6505 CHECK(msg->findInt32( 6506 "keepComponentAllocated", &keepComponentAllocated)); 6507 ALOGW_IF(keepComponentAllocated, 6508 "cannot keep component allocated on shutdown in Uninitialized state"); 6509 6510 sp<AMessage> notify = mCodec->mNotify->dup(); 6511 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6512 notify->post(); 6513 6514 handled = true; 6515 break; 6516 } 6517 6518 case ACodec::kWhatFlush: 6519 { 6520 sp<AMessage> notify = mCodec->mNotify->dup(); 6521 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6522 notify->post(); 6523 6524 handled = true; 6525 break; 6526 } 6527 6528 case ACodec::kWhatReleaseCodecInstance: 6529 { 6530 // nothing to do, as we have already signaled shutdown 6531 handled = true; 6532 break; 6533 } 6534 6535 default: 6536 return BaseState::onMessageReceived(msg); 6537 } 6538 6539 return handled; 6540} 6541 6542void ACodec::UninitializedState::onSetup( 6543 
const sp<AMessage> &msg) { 6544 if (onAllocateComponent(msg) 6545 && mCodec->mLoadedState->onConfigureComponent(msg)) { 6546 mCodec->mLoadedState->onStart(); 6547 } 6548} 6549 6550bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { 6551 ALOGV("onAllocateComponent"); 6552 6553 CHECK(mCodec->mNode == 0); 6554 6555 OMXClient client; 6556 if (client.connect() != OK) { 6557 mCodec->signalError(OMX_ErrorUndefined, NO_INIT); 6558 return false; 6559 } 6560 6561 sp<IOMX> omx = client.interface(); 6562 6563 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec); 6564 6565 Vector<AString> matchingCodecs; 6566 6567 AString mime; 6568 6569 AString componentName; 6570 uint32_t quirks = 0; 6571 int32_t encoder = false; 6572 if (msg->findString("componentName", &componentName)) { 6573 sp<IMediaCodecList> list = MediaCodecList::getInstance(); 6574 if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) { 6575 matchingCodecs.add(componentName); 6576 } 6577 } else { 6578 CHECK(msg->findString("mime", &mime)); 6579 6580 if (!msg->findInt32("encoder", &encoder)) { 6581 encoder = false; 6582 } 6583 6584 MediaCodecList::findMatchingCodecs( 6585 mime.c_str(), 6586 encoder, // createEncoder 6587 0, // flags 6588 &matchingCodecs); 6589 } 6590 6591 sp<CodecObserver> observer = new CodecObserver; 6592 IOMX::node_id node = 0; 6593 6594 status_t err = NAME_NOT_FOUND; 6595 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size(); 6596 ++matchIndex) { 6597 componentName = matchingCodecs[matchIndex]; 6598 quirks = MediaCodecList::getQuirksFor(componentName.c_str()); 6599 6600 pid_t tid = gettid(); 6601 int prevPriority = androidGetThreadPriority(tid); 6602 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); 6603 err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node); 6604 androidSetThreadPriority(tid, prevPriority); 6605 6606 if (err == OK) { 6607 break; 6608 } else { 6609 ALOGW("Allocating component '%s' 
failed, try next one.", componentName.c_str()); 6610 } 6611 6612 node = 0; 6613 } 6614 6615 if (node == 0) { 6616 if (!mime.empty()) { 6617 ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.", 6618 encoder ? "en" : "de", mime.c_str(), err); 6619 } else { 6620 ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err); 6621 } 6622 6623 mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err)); 6624 return false; 6625 } 6626 6627 mDeathNotifier = new DeathNotifier(notify); 6628 if (mCodec->mNodeBinder == NULL || 6629 mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) { 6630 // This was a local binder, if it dies so do we, we won't care 6631 // about any notifications in the afterlife. 6632 mDeathNotifier.clear(); 6633 } 6634 6635 notify = new AMessage(kWhatOMXMessageList, mCodec); 6636 observer->setNotificationMessage(notify); 6637 6638 mCodec->mComponentName = componentName; 6639 mCodec->mRenderTracker.setComponentName(componentName); 6640 mCodec->mFlags = 0; 6641 6642 if (componentName.endsWith(".secure")) { 6643 mCodec->mFlags |= kFlagIsSecure; 6644 mCodec->mFlags |= kFlagIsGrallocUsageProtected; 6645 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 6646 } 6647 6648 mCodec->mQuirks = quirks; 6649 mCodec->mOMX = omx; 6650 mCodec->mNode = node; 6651 6652 { 6653 sp<AMessage> notify = mCodec->mNotify->dup(); 6654 notify->setInt32("what", CodecBase::kWhatComponentAllocated); 6655 notify->setString("componentName", mCodec->mComponentName.c_str()); 6656 notify->post(); 6657 } 6658 6659 mCodec->changeState(mCodec->mLoadedState); 6660 6661 return true; 6662} 6663 6664//////////////////////////////////////////////////////////////////////////////// 6665 6666ACodec::LoadedState::LoadedState(ACodec *codec) 6667 : BaseState(codec) { 6668} 6669 6670void ACodec::LoadedState::stateEntered() { 6671 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); 6672 6673 mCodec->mPortEOS[kPortIndexInput] = 6674 
mCodec->mPortEOS[kPortIndexOutput] = false; 6675 6676 mCodec->mInputEOSResult = OK; 6677 6678 mCodec->mDequeueCounter = 0; 6679 mCodec->mMetadataBuffersToSubmit = 0; 6680 mCodec->mRepeatFrameDelayUs = -1ll; 6681 mCodec->mInputFormat.clear(); 6682 mCodec->mOutputFormat.clear(); 6683 mCodec->mBaseOutputFormat.clear(); 6684 6685 if (mCodec->mShutdownInProgress) { 6686 bool keepComponentAllocated = mCodec->mKeepComponentAllocated; 6687 6688 mCodec->mShutdownInProgress = false; 6689 mCodec->mKeepComponentAllocated = false; 6690 6691 onShutdown(keepComponentAllocated); 6692 } 6693 mCodec->mExplicitShutdown = false; 6694 6695 mCodec->processDeferredMessages(); 6696} 6697 6698void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { 6699 if (!keepComponentAllocated) { 6700 (void)mCodec->mOMX->freeNode(mCodec->mNode); 6701 6702 mCodec->changeState(mCodec->mUninitializedState); 6703 } 6704 6705 if (mCodec->mExplicitShutdown) { 6706 sp<AMessage> notify = mCodec->mNotify->dup(); 6707 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6708 notify->post(); 6709 mCodec->mExplicitShutdown = false; 6710 } 6711} 6712 6713bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { 6714 bool handled = false; 6715 6716 switch (msg->what()) { 6717 case ACodec::kWhatConfigureComponent: 6718 { 6719 onConfigureComponent(msg); 6720 handled = true; 6721 break; 6722 } 6723 6724 case ACodec::kWhatCreateInputSurface: 6725 { 6726 onCreateInputSurface(msg); 6727 handled = true; 6728 break; 6729 } 6730 6731 case ACodec::kWhatSetInputSurface: 6732 { 6733 onSetInputSurface(msg); 6734 handled = true; 6735 break; 6736 } 6737 6738 case ACodec::kWhatStart: 6739 { 6740 onStart(); 6741 handled = true; 6742 break; 6743 } 6744 6745 case ACodec::kWhatShutdown: 6746 { 6747 int32_t keepComponentAllocated; 6748 CHECK(msg->findInt32( 6749 "keepComponentAllocated", &keepComponentAllocated)); 6750 6751 mCodec->mExplicitShutdown = true; 6752 onShutdown(keepComponentAllocated); 6753 
6754 handled = true; 6755 break; 6756 } 6757 6758 case ACodec::kWhatFlush: 6759 { 6760 sp<AMessage> notify = mCodec->mNotify->dup(); 6761 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6762 notify->post(); 6763 6764 handled = true; 6765 break; 6766 } 6767 6768 default: 6769 return BaseState::onMessageReceived(msg); 6770 } 6771 6772 return handled; 6773} 6774 6775bool ACodec::LoadedState::onConfigureComponent( 6776 const sp<AMessage> &msg) { 6777 ALOGV("onConfigureComponent"); 6778 6779 CHECK(mCodec->mNode != 0); 6780 6781 status_t err = OK; 6782 AString mime; 6783 if (!msg->findString("mime", &mime)) { 6784 err = BAD_VALUE; 6785 } else { 6786 err = mCodec->configureCodec(mime.c_str(), msg); 6787 } 6788 if (err != OK) { 6789 ALOGE("[%s] configureCodec returning error %d", 6790 mCodec->mComponentName.c_str(), err); 6791 6792 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6793 return false; 6794 } 6795 6796 { 6797 sp<AMessage> notify = mCodec->mNotify->dup(); 6798 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 6799 notify->setMessage("input-format", mCodec->mInputFormat); 6800 notify->setMessage("output-format", mCodec->mOutputFormat); 6801 notify->post(); 6802 } 6803 6804 return true; 6805} 6806 6807status_t ACodec::LoadedState::setupInputSurface() { 6808 status_t err = OK; 6809 6810 if (mCodec->mRepeatFrameDelayUs > 0ll) { 6811 err = mCodec->mOMX->setInternalOption( 6812 mCodec->mNode, 6813 kPortIndexInput, 6814 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 6815 &mCodec->mRepeatFrameDelayUs, 6816 sizeof(mCodec->mRepeatFrameDelayUs)); 6817 6818 if (err != OK) { 6819 ALOGE("[%s] Unable to configure option to repeat previous " 6820 "frames (err %d)", 6821 mCodec->mComponentName.c_str(), 6822 err); 6823 return err; 6824 } 6825 } 6826 6827 if (mCodec->mMaxPtsGapUs > 0ll) { 6828 err = mCodec->mOMX->setInternalOption( 6829 mCodec->mNode, 6830 kPortIndexInput, 6831 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 6832 
&mCodec->mMaxPtsGapUs, 6833 sizeof(mCodec->mMaxPtsGapUs)); 6834 6835 if (err != OK) { 6836 ALOGE("[%s] Unable to configure max timestamp gap (err %d)", 6837 mCodec->mComponentName.c_str(), 6838 err); 6839 return err; 6840 } 6841 } 6842 6843 if (mCodec->mMaxFps > 0) { 6844 err = mCodec->mOMX->setInternalOption( 6845 mCodec->mNode, 6846 kPortIndexInput, 6847 IOMX::INTERNAL_OPTION_MAX_FPS, 6848 &mCodec->mMaxFps, 6849 sizeof(mCodec->mMaxFps)); 6850 6851 if (err != OK) { 6852 ALOGE("[%s] Unable to configure max fps (err %d)", 6853 mCodec->mComponentName.c_str(), 6854 err); 6855 return err; 6856 } 6857 } 6858 6859 if (mCodec->mTimePerCaptureUs > 0ll 6860 && mCodec->mTimePerFrameUs > 0ll) { 6861 int64_t timeLapse[2]; 6862 timeLapse[0] = mCodec->mTimePerFrameUs; 6863 timeLapse[1] = mCodec->mTimePerCaptureUs; 6864 err = mCodec->mOMX->setInternalOption( 6865 mCodec->mNode, 6866 kPortIndexInput, 6867 IOMX::INTERNAL_OPTION_TIME_LAPSE, 6868 &timeLapse[0], 6869 sizeof(timeLapse)); 6870 6871 if (err != OK) { 6872 ALOGE("[%s] Unable to configure time lapse (err %d)", 6873 mCodec->mComponentName.c_str(), 6874 err); 6875 return err; 6876 } 6877 } 6878 6879 if (mCodec->mCreateInputBuffersSuspended) { 6880 bool suspend = true; 6881 err = mCodec->mOMX->setInternalOption( 6882 mCodec->mNode, 6883 kPortIndexInput, 6884 IOMX::INTERNAL_OPTION_SUSPEND, 6885 &suspend, 6886 sizeof(suspend)); 6887 6888 if (err != OK) { 6889 ALOGE("[%s] Unable to configure option to suspend (err %d)", 6890 mCodec->mComponentName.c_str(), 6891 err); 6892 return err; 6893 } 6894 } 6895 6896 uint32_t usageBits; 6897 if (mCodec->mOMX->getParameter( 6898 mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 6899 &usageBits, sizeof(usageBits)) == OK) { 6900 mCodec->mInputFormat->setInt32( 6901 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 6902 } 6903 6904 sp<ABuffer> colorAspectsBuffer; 6905 if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) { 6906 err 
= mCodec->mOMX->setInternalOption( 6907 mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS, 6908 colorAspectsBuffer->base(), colorAspectsBuffer->capacity()); 6909 if (err != OK) { 6910 ALOGE("[%s] Unable to configure color aspects (err %d)", 6911 mCodec->mComponentName.c_str(), err); 6912 return err; 6913 } 6914 } 6915 return OK; 6916} 6917 6918void ACodec::LoadedState::onCreateInputSurface( 6919 const sp<AMessage> & /* msg */) { 6920 ALOGV("onCreateInputSurface"); 6921 6922 sp<AMessage> notify = mCodec->mNotify->dup(); 6923 notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); 6924 6925 android_dataspace dataSpace; 6926 status_t err = 6927 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6928 notify->setMessage("input-format", mCodec->mInputFormat); 6929 notify->setMessage("output-format", mCodec->mOutputFormat); 6930 6931 sp<IGraphicBufferProducer> bufferProducer; 6932 if (err == OK) { 6933 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6934 err = mCodec->mOMX->createInputSurface( 6935 mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, 6936 &mCodec->mInputMetadataType); 6937 // framework uses ANW buffers internally instead of gralloc handles 6938 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6939 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6940 } 6941 } 6942 6943 if (err == OK) { 6944 err = setupInputSurface(); 6945 } 6946 6947 if (err == OK) { 6948 notify->setObject("input-surface", 6949 new BufferProducerWrapper(bufferProducer)); 6950 } else { 6951 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6952 // the error through because it's in the "configured" state. We 6953 // send a kWhatInputSurfaceCreated with an error value instead. 
6954 ALOGE("[%s] onCreateInputSurface returning error %d", 6955 mCodec->mComponentName.c_str(), err); 6956 notify->setInt32("err", err); 6957 } 6958 notify->post(); 6959} 6960 6961void ACodec::LoadedState::onSetInputSurface( 6962 const sp<AMessage> &msg) { 6963 ALOGV("onSetInputSurface"); 6964 6965 sp<AMessage> notify = mCodec->mNotify->dup(); 6966 notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted); 6967 6968 sp<RefBase> obj; 6969 CHECK(msg->findObject("input-surface", &obj)); 6970 sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get()); 6971 6972 android_dataspace dataSpace; 6973 status_t err = 6974 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6975 notify->setMessage("input-format", mCodec->mInputFormat); 6976 notify->setMessage("output-format", mCodec->mOutputFormat); 6977 6978 if (err == OK) { 6979 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6980 err = mCodec->mOMX->setInputSurface( 6981 mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(), 6982 &mCodec->mInputMetadataType); 6983 // framework uses ANW buffers internally instead of gralloc handles 6984 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6985 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6986 } 6987 } 6988 6989 if (err == OK) { 6990 surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace); 6991 err = setupInputSurface(); 6992 } 6993 6994 if (err != OK) { 6995 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6996 // the error through because it's in the "configured" state. We 6997 // send a kWhatInputSurfaceAccepted with an error value instead. 
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Kick off the Loaded->Idle transition: request OMX_StateIdle from the
// component and, on success, enter LoadedToIdleState (which allocates the
// port buffers the component needs before it can actually reach Idle).
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// On entering Loaded->Idle, allocate buffers on both ports. On failure,
// roll back: signal the error, command the component back to
// OMX_StateLoaded, free whatever buffers we still fully own, and return
// to LoadedState.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
             "(error 0x%08x)",
             err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        // Only free a port's buffers if none are still held by the
        // component or the native window.
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates input-port buffers first, then output-port buffers; stops at
// the first failure.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        // Not safe to act on these mid-transition; replay them once the
        // next stable state is entered.
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Waits for the component to acknowledge the Loaded->Idle state change,
// then immediately requests Idle->Executing and moves on to
// IdleToExecutingState.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        // Defer until the Executing state has been reached.
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Completion of Idle->Executing: resume the ExecutingState (submits
// buffers to the component) and switch to it.
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    return RESUBMIT_BUFFERS;
}

// In metadata mode, hand the component one output metadata buffer for each
// input buffer it currently holds (covers the port-reconfiguration case).
void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info =
                &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Queue every output buffer we own with the component via fillBuffer().
// Buffers currently held by the native window are skipped; any other
// ownership state is a logic error and aborts the loop with signalError().
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        // Ownership of the write fence fd transfers to the component.
        status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
        info->mFenceFd = -1;
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

// (Re)start buffer flow with the component; no-op if already active.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }

    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    // Replay whatever was deferred while we were transitioning.
    mCodec->processDeferredMessages();
}

bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            // Flush both ports at once; FlushingState waits for each port's
            // completion event.
            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // A reply is optional; only answer when the sender asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            // Keep feeding output metadata buffers until output EOS is seen.
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime ("live") parameter changes. Every key is optional; a
// missing key leaves the corresponding setting untouched. Returns the
// first hard failure, OK otherwise.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMX->setConfig(
                mNode,
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        status_t err =
            mOMX->setInternalOption(
                    mNode,
                    kPortIndexInput,
                    IOMX::INTERNAL_OPTION_START_TIME,
                    &skipFramesBeforeUs,
                    sizeof(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        // Non-zero means suspend (drop) input.
        bool suspend = dropInputFrames != 0;

        status_t err =
            mOMX->setInternalOption(
                    mNode,
                    kPortIndexInput,
                    IOMX::INTERNAL_OPTION_SUSPEND,
                    &suspend,
                    sizeof(suspend));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err =
            setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            // Optional key: log and carry on.
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    status_t err = configureTemporalLayers(params, false /* inConfigure */, mOutputFormat);
    if (err != OK) {
        err = OK; // ignore failure
    }

    return err;
}

// Asks the component to signal end-of-input-stream and notifies the client
// with kWhatSignaledInputEOS; "err" is attached only on failure.
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = mOMX->signalEndOfInputStream(mNode);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            // A port-definition change requires disabling and re-allocating
            // the output port; crop / intra-refresh changes do not.
            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                mCodec->mMetadataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode,
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                        mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// While the output port is being reconfigured, returned output buffers are
// freed; input keeps flowing normally.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        // All of these must wait until the reconfiguration completes.
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    // All buffers were returned; release the dealer's memory.
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from executing->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                // Only resubmit output buffers if we were actively running
                // before the reconfiguration started.
                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component has reached Idle AND every buffer is back in our
// hands, free the port buffers, request Idle->Loaded, and move on to
// IdleToLoadedState.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            // Free both ports; remember the first failure.
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            if (err == OK) {
                err = err2;
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display.  This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
7837 ALOGE("Got flush request in IdleToLoadedState"); 7838 break; 7839 } 7840 7841 default: 7842 handled = BaseState::onMessageReceived(msg); 7843 break; 7844 } 7845 7846 return handled; 7847} 7848 7849void ACodec::IdleToLoadedState::stateEntered() { 7850 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7851} 7852 7853bool ACodec::IdleToLoadedState::onOMXEvent( 7854 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7855 switch (event) { 7856 case OMX_EventCmdComplete: 7857 { 7858 if (data1 != (OMX_U32)OMX_CommandStateSet 7859 || data2 != (OMX_U32)OMX_StateLoaded) { 7860 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7861 asString((OMX_COMMANDTYPE)data1), data1, 7862 asString((OMX_STATETYPE)data2), data2); 7863 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7864 return true; 7865 } 7866 7867 mCodec->changeState(mCodec->mLoadedState); 7868 7869 return true; 7870 } 7871 7872 default: 7873 return BaseState::onOMXEvent(event, data1, data2); 7874 } 7875} 7876 7877//////////////////////////////////////////////////////////////////////////////// 7878 7879ACodec::FlushingState::FlushingState(ACodec *codec) 7880 : BaseState(codec) { 7881} 7882 7883void ACodec::FlushingState::stateEntered() { 7884 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7885 7886 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7887} 7888 7889bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7890 bool handled = false; 7891 7892 switch (msg->what()) { 7893 case kWhatShutdown: 7894 { 7895 mCodec->deferMessage(msg); 7896 break; 7897 } 7898 7899 case kWhatFlush: 7900 { 7901 // We're already doing this right now. 
7902 handled = true; 7903 break; 7904 } 7905 7906 default: 7907 handled = BaseState::onMessageReceived(msg); 7908 break; 7909 } 7910 7911 return handled; 7912} 7913 7914bool ACodec::FlushingState::onOMXEvent( 7915 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7916 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7917 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7918 7919 switch (event) { 7920 case OMX_EventCmdComplete: 7921 { 7922 if (data1 != (OMX_U32)OMX_CommandFlush) { 7923 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7924 asString((OMX_COMMANDTYPE)data1), data1, data2); 7925 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7926 return true; 7927 } 7928 7929 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7930 if (mFlushComplete[data2]) { 7931 ALOGW("Flush already completed for %s port", 7932 data2 == kPortIndexInput ? "input" : "output"); 7933 return true; 7934 } 7935 mFlushComplete[data2] = true; 7936 7937 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7938 changeStateIfWeOwnAllBuffers(); 7939 } 7940 } else if (data2 == OMX_ALL) { 7941 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7942 ALOGW("received flush complete event for OMX_ALL before ports have been" 7943 "flushed (%d/%d)", 7944 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7945 return false; 7946 } 7947 7948 changeStateIfWeOwnAllBuffers(); 7949 } else { 7950 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7951 } 7952 7953 return true; 7954 } 7955 7956 case OMX_EventPortSettingsChanged: 7957 { 7958 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7959 msg->setInt32("type", omx_message::EVENT); 7960 msg->setInt32("node", mCodec->mNode); 7961 msg->setInt32("event", event); 7962 msg->setInt32("data1", data1); 7963 msg->setInt32("data2", data2); 7964 7965 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7966 
mCodec->mComponentName.c_str()); 7967 7968 mCodec->deferMessage(msg); 7969 7970 return true; 7971 } 7972 7973 default: 7974 return BaseState::onOMXEvent(event, data1, data2); 7975 } 7976 7977 return true; 7978} 7979 7980void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7981 BaseState::onOutputBufferDrained(msg); 7982 7983 changeStateIfWeOwnAllBuffers(); 7984} 7985 7986void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7987 BaseState::onInputBufferFilled(msg); 7988 7989 changeStateIfWeOwnAllBuffers(); 7990} 7991 7992void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7993 if (mFlushComplete[kPortIndexInput] 7994 && mFlushComplete[kPortIndexOutput] 7995 && mCodec->allYourBuffersAreBelongToUs()) { 7996 // We now own all buffers except possibly those still queued with 7997 // the native window for rendering. Let's get those back as well. 7998 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7999 8000 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 8001 8002 sp<AMessage> notify = mCodec->mNotify->dup(); 8003 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 8004 notify->post(); 8005 8006 mCodec->mPortEOS[kPortIndexInput] = 8007 mCodec->mPortEOS[kPortIndexOutput] = false; 8008 8009 mCodec->mInputEOSResult = OK; 8010 8011 if (mCodec->mSkipCutBuffer != NULL) { 8012 mCodec->mSkipCutBuffer->clear(); 8013 } 8014 8015 mCodec->changeState(mCodec->mExecutingState); 8016 } 8017} 8018 8019status_t ACodec::queryCapabilities( 8020 const AString &name, const AString &mime, bool isEncoder, 8021 sp<MediaCodecInfo::Capabilities> *caps) { 8022 (*caps).clear(); 8023 const char *role = getComponentRole(isEncoder, mime.c_str()); 8024 if (role == NULL) { 8025 return BAD_VALUE; 8026 } 8027 8028 OMXClient client; 8029 status_t err = client.connect(); 8030 if (err != OK) { 8031 return err; 8032 } 8033 8034 sp<IOMX> omx = client.interface(); 8035 sp<CodecObserver> observer = new CodecObserver; 8036 
    IOMX::node_id node = 0;

    err = omx->allocateNode(name.c_str(), observer, NULL, &node);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = setComponentRole(omx, node, role);
    if (err != OK) {
        // Clean up the node before bailing out.
        omx->freeNode(node);
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        // Enumerate supported profile/level pairs; stop at the first
        // query failure or after kMaxIndicesToCheck entries.
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            OMX_U32 flexibleEquivalent;
            if (isFlexibleColorFormat(
                    omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                // Emit each flexible equivalent only once, ahead of the
                // concrete format it stands in for.
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        // (nProfileIndex still holds the last index tried; 0 means the very
        // first query already failed.)
        if (param.nProfileIndex == 0) {
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        native_handle_t *sidebandHandle = NULL;
        if (omx->configureVideoTunnelMode(
                node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omx->storeMetaDataInBuffers(
                node, kPortIndexOutput, OMX_TRUE) == OK ||
            omx->prepareForAdaptivePlayback(
                node, kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
        if (omx->getConfig(
                node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omx->freeNode(node);
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
8183//static 8184status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 8185 switch (numChannels) { 8186 case 1: 8187 map[0] = OMX_AUDIO_ChannelCF; 8188 break; 8189 case 2: 8190 map[0] = OMX_AUDIO_ChannelLF; 8191 map[1] = OMX_AUDIO_ChannelRF; 8192 break; 8193 case 3: 8194 map[0] = OMX_AUDIO_ChannelLF; 8195 map[1] = OMX_AUDIO_ChannelRF; 8196 map[2] = OMX_AUDIO_ChannelCF; 8197 break; 8198 case 4: 8199 map[0] = OMX_AUDIO_ChannelLF; 8200 map[1] = OMX_AUDIO_ChannelRF; 8201 map[2] = OMX_AUDIO_ChannelLR; 8202 map[3] = OMX_AUDIO_ChannelRR; 8203 break; 8204 case 5: 8205 map[0] = OMX_AUDIO_ChannelLF; 8206 map[1] = OMX_AUDIO_ChannelRF; 8207 map[2] = OMX_AUDIO_ChannelCF; 8208 map[3] = OMX_AUDIO_ChannelLR; 8209 map[4] = OMX_AUDIO_ChannelRR; 8210 break; 8211 case 6: 8212 map[0] = OMX_AUDIO_ChannelLF; 8213 map[1] = OMX_AUDIO_ChannelRF; 8214 map[2] = OMX_AUDIO_ChannelCF; 8215 map[3] = OMX_AUDIO_ChannelLFE; 8216 map[4] = OMX_AUDIO_ChannelLR; 8217 map[5] = OMX_AUDIO_ChannelRR; 8218 break; 8219 case 7: 8220 map[0] = OMX_AUDIO_ChannelLF; 8221 map[1] = OMX_AUDIO_ChannelRF; 8222 map[2] = OMX_AUDIO_ChannelCF; 8223 map[3] = OMX_AUDIO_ChannelLFE; 8224 map[4] = OMX_AUDIO_ChannelLR; 8225 map[5] = OMX_AUDIO_ChannelRR; 8226 map[6] = OMX_AUDIO_ChannelCS; 8227 break; 8228 case 8: 8229 map[0] = OMX_AUDIO_ChannelLF; 8230 map[1] = OMX_AUDIO_ChannelRF; 8231 map[2] = OMX_AUDIO_ChannelCF; 8232 map[3] = OMX_AUDIO_ChannelLFE; 8233 map[4] = OMX_AUDIO_ChannelLR; 8234 map[5] = OMX_AUDIO_ChannelRR; 8235 map[6] = OMX_AUDIO_ChannelLS; 8236 map[7] = OMX_AUDIO_ChannelRS; 8237 break; 8238 default: 8239 return -EINVAL; 8240 } 8241 8242 return OK; 8243} 8244 8245} // namespace android 8246