ACodec.cpp revision 89cc6d8d3342dc9f9fad8b3825c647e61e018b0e
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "omx/OMXUtils.h" 57 58namespace android { 59 60enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62}; 63 64// OMX errors are directly mapped into 
status_t range if 65// there is no corresponding MediaError status code. 66// Use the statusFromOMXError(int32_t omxError) function. 67// 68// Currently this is a direct map. 69// See frameworks/native/include/media/openmax/OMX_Core.h 70// 71// Vendor OMX errors from 0x90000000 - 0x9000FFFF 72// Extension OMX errors from 0x8F000000 - 0x90000000 73// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 74// 75 76// returns true if err is a recognized OMX error code. 77// as OMX error is OMX_S32, this is an int32_t type 78static inline bool isOMXError(int32_t err) { 79 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 80} 81 82// converts an OMX error to a status_t 83static inline status_t statusFromOMXError(int32_t omxError) { 84 switch (omxError) { 85 case OMX_ErrorInvalidComponentName: 86 case OMX_ErrorComponentNotFound: 87 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 88 default: 89 return isOMXError(omxError) ? omxError : 0; // no translation required 90 } 91} 92 93// checks and converts status_t to a non-side-effect status_t 94static inline status_t makeNoSideEffectStatus(status_t err) { 95 switch (err) { 96 // the following errors have side effects and may come 97 // from other code modules. Remap for safety reasons. 
98 case INVALID_OPERATION: 99 case DEAD_OBJECT: 100 return UNKNOWN_ERROR; 101 default: 102 return err; 103 } 104} 105 106struct MessageList : public RefBase { 107 MessageList() { 108 } 109 virtual ~MessageList() { 110 } 111 std::list<sp<AMessage> > &getList() { return mList; } 112private: 113 std::list<sp<AMessage> > mList; 114 115 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 116}; 117 118static sp<DataConverter> getCopyConverter() { 119 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited 120 static sp<DataConverter> sCopyConverter; // zero-inited 121 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); }); 122 return sCopyConverter; 123} 124 125struct CodecObserver : public BnOMXObserver { 126 CodecObserver() {} 127 128 void setNotificationMessage(const sp<AMessage> &msg) { 129 mNotify = msg; 130 } 131 132 // from IOMXObserver 133 virtual void onMessages(const std::list<omx_message> &messages) { 134 if (messages.empty()) { 135 return; 136 } 137 138 sp<AMessage> notify = mNotify->dup(); 139 bool first = true; 140 sp<MessageList> msgList = new MessageList(); 141 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 142 it != messages.cend(); ++it) { 143 const omx_message &omx_msg = *it; 144 if (first) { 145 notify->setInt32("node", omx_msg.node); 146 first = false; 147 } 148 149 sp<AMessage> msg = new AMessage; 150 msg->setInt32("type", omx_msg.type); 151 switch (omx_msg.type) { 152 case omx_message::EVENT: 153 { 154 msg->setInt32("event", omx_msg.u.event_data.event); 155 msg->setInt32("data1", omx_msg.u.event_data.data1); 156 msg->setInt32("data2", omx_msg.u.event_data.data2); 157 break; 158 } 159 160 case omx_message::EMPTY_BUFFER_DONE: 161 { 162 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 163 msg->setInt32("fence_fd", omx_msg.fenceFd); 164 break; 165 } 166 167 case omx_message::FILL_BUFFER_DONE: 168 { 169 msg->setInt32( 170 "buffer", omx_msg.u.extended_buffer_data.buffer); 171 msg->setInt32( 172 "range_offset", 173 
omx_msg.u.extended_buffer_data.range_offset); 174 msg->setInt32( 175 "range_length", 176 omx_msg.u.extended_buffer_data.range_length); 177 msg->setInt32( 178 "flags", 179 omx_msg.u.extended_buffer_data.flags); 180 msg->setInt64( 181 "timestamp", 182 omx_msg.u.extended_buffer_data.timestamp); 183 msg->setInt32( 184 "fence_fd", omx_msg.fenceFd); 185 break; 186 } 187 188 case omx_message::FRAME_RENDERED: 189 { 190 msg->setInt64( 191 "media_time_us", omx_msg.u.render_data.timestamp); 192 msg->setInt64( 193 "system_nano", omx_msg.u.render_data.nanoTime); 194 break; 195 } 196 197 default: 198 ALOGE("Unrecognized message type: %d", omx_msg.type); 199 break; 200 } 201 msgList->getList().push_back(msg); 202 } 203 notify->setObject("messages", msgList); 204 notify->post(); 205 } 206 207protected: 208 virtual ~CodecObserver() {} 209 210private: 211 sp<AMessage> mNotify; 212 213 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); 214}; 215 216//////////////////////////////////////////////////////////////////////////////// 217 218struct ACodec::BaseState : public AState { 219 explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL); 220 221protected: 222 enum PortMode { 223 KEEP_BUFFERS, 224 RESUBMIT_BUFFERS, 225 FREE_BUFFERS, 226 }; 227 228 ACodec *mCodec; 229 230 virtual PortMode getPortMode(OMX_U32 portIndex); 231 232 virtual bool onMessageReceived(const sp<AMessage> &msg); 233 234 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 235 236 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 237 virtual void onInputBufferFilled(const sp<AMessage> &msg); 238 239 void postFillThisBuffer(BufferInfo *info); 240 241private: 242 // Handles an OMX message. Returns true iff message was handled. 243 bool onOMXMessage(const sp<AMessage> &msg); 244 245 // Handles a list of messages. Returns true iff messages were handled. 
246 bool onOMXMessageList(const sp<AMessage> &msg); 247 248 // returns true iff this message is for this component and the component is alive 249 bool checkOMXMessage(const sp<AMessage> &msg); 250 251 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd); 252 253 bool onOMXFillBufferDone( 254 IOMX::buffer_id bufferID, 255 size_t rangeOffset, size_t rangeLength, 256 OMX_U32 flags, 257 int64_t timeUs, 258 int fenceFd); 259 260 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 261 262 void getMoreInputDataIfPossible(); 263 264 DISALLOW_EVIL_CONSTRUCTORS(BaseState); 265}; 266 267//////////////////////////////////////////////////////////////////////////////// 268 269struct ACodec::DeathNotifier : public IBinder::DeathRecipient { 270 explicit DeathNotifier(const sp<AMessage> ¬ify) 271 : mNotify(notify) { 272 } 273 274 virtual void binderDied(const wp<IBinder> &) { 275 mNotify->post(); 276 } 277 278protected: 279 virtual ~DeathNotifier() {} 280 281private: 282 sp<AMessage> mNotify; 283 284 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); 285}; 286 287struct ACodec::UninitializedState : public ACodec::BaseState { 288 explicit UninitializedState(ACodec *codec); 289 290protected: 291 virtual bool onMessageReceived(const sp<AMessage> &msg); 292 virtual void stateEntered(); 293 294private: 295 void onSetup(const sp<AMessage> &msg); 296 bool onAllocateComponent(const sp<AMessage> &msg); 297 298 sp<DeathNotifier> mDeathNotifier; 299 300 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); 301}; 302 303//////////////////////////////////////////////////////////////////////////////// 304 305struct ACodec::LoadedState : public ACodec::BaseState { 306 explicit LoadedState(ACodec *codec); 307 308protected: 309 virtual bool onMessageReceived(const sp<AMessage> &msg); 310 virtual void stateEntered(); 311 312private: 313 friend struct ACodec::UninitializedState; 314 315 bool onConfigureComponent(const sp<AMessage> &msg); 316 void onCreateInputSurface(const 
sp<AMessage> &msg); 317 void onSetInputSurface(const sp<AMessage> &msg); 318 void onStart(); 319 void onShutdown(bool keepComponentAllocated); 320 321 status_t setupInputSurface(); 322 323 DISALLOW_EVIL_CONSTRUCTORS(LoadedState); 324}; 325 326//////////////////////////////////////////////////////////////////////////////// 327 328struct ACodec::LoadedToIdleState : public ACodec::BaseState { 329 explicit LoadedToIdleState(ACodec *codec); 330 331protected: 332 virtual bool onMessageReceived(const sp<AMessage> &msg); 333 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 334 virtual void stateEntered(); 335 336private: 337 status_t allocateBuffers(); 338 339 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); 340}; 341 342//////////////////////////////////////////////////////////////////////////////// 343 344struct ACodec::IdleToExecutingState : public ACodec::BaseState { 345 explicit IdleToExecutingState(ACodec *codec); 346 347protected: 348 virtual bool onMessageReceived(const sp<AMessage> &msg); 349 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 350 virtual void stateEntered(); 351 352private: 353 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); 354}; 355 356//////////////////////////////////////////////////////////////////////////////// 357 358struct ACodec::ExecutingState : public ACodec::BaseState { 359 explicit ExecutingState(ACodec *codec); 360 361 void submitRegularOutputBuffers(); 362 void submitOutputMetaBuffers(); 363 void submitOutputBuffers(); 364 365 // Submit output buffers to the decoder, submit input buffers to client 366 // to fill with data. 367 void resume(); 368 369 // Returns true iff input and output buffers are in play. 
370 bool active() const { return mActive; } 371 372protected: 373 virtual PortMode getPortMode(OMX_U32 portIndex); 374 virtual bool onMessageReceived(const sp<AMessage> &msg); 375 virtual void stateEntered(); 376 377 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 378 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 379 380private: 381 bool mActive; 382 383 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); 384}; 385 386//////////////////////////////////////////////////////////////////////////////// 387 388struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { 389 explicit OutputPortSettingsChangedState(ACodec *codec); 390 391protected: 392 virtual PortMode getPortMode(OMX_U32 portIndex); 393 virtual bool onMessageReceived(const sp<AMessage> &msg); 394 virtual void stateEntered(); 395 396 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 397 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 398 399private: 400 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); 401}; 402 403//////////////////////////////////////////////////////////////////////////////// 404 405struct ACodec::ExecutingToIdleState : public ACodec::BaseState { 406 explicit ExecutingToIdleState(ACodec *codec); 407 408protected: 409 virtual bool onMessageReceived(const sp<AMessage> &msg); 410 virtual void stateEntered(); 411 412 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 413 414 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 415 virtual void onInputBufferFilled(const sp<AMessage> &msg); 416 417private: 418 void changeStateIfWeOwnAllBuffers(); 419 420 bool mComponentNowIdle; 421 422 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); 423}; 424 425//////////////////////////////////////////////////////////////////////////////// 426 427struct ACodec::IdleToLoadedState : public ACodec::BaseState { 428 explicit IdleToLoadedState(ACodec 
*codec); 429 430protected: 431 virtual bool onMessageReceived(const sp<AMessage> &msg); 432 virtual void stateEntered(); 433 434 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 435 436private: 437 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); 438}; 439 440//////////////////////////////////////////////////////////////////////////////// 441 442struct ACodec::FlushingState : public ACodec::BaseState { 443 explicit FlushingState(ACodec *codec); 444 445protected: 446 virtual bool onMessageReceived(const sp<AMessage> &msg); 447 virtual void stateEntered(); 448 449 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 450 451 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 452 virtual void onInputBufferFilled(const sp<AMessage> &msg); 453 454private: 455 bool mFlushComplete[2]; 456 457 void changeStateIfWeOwnAllBuffers(); 458 459 DISALLOW_EVIL_CONSTRUCTORS(FlushingState); 460}; 461 462//////////////////////////////////////////////////////////////////////////////// 463 464void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) { 465 if (mFenceFd >= 0) { 466 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", 467 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 468 } 469 mFenceFd = fenceFd; 470 mIsReadFence = false; 471} 472 473void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { 474 if (mFenceFd >= 0) { 475 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", 476 mIsReadFence ? 
"read" : "write", mFenceFd, fenceFd, dbg); 477 } 478 mFenceFd = fenceFd; 479 mIsReadFence = true; 480} 481 482void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 483 if (mFenceFd >= 0 && mIsReadFence) { 484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 485 } 486} 487 488void ACodec::BufferInfo::checkReadFence(const char *dbg) { 489 if (mFenceFd >= 0 && !mIsReadFence) { 490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 491 } 492} 493 494//////////////////////////////////////////////////////////////////////////////// 495 496ACodec::ACodec() 497 : mSampleRate(0), 498 mQuirks(0), 499 mNode(0), 500 mUsingNativeWindow(false), 501 mNativeWindowUsageBits(0), 502 mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN), 503 mIsVideo(false), 504 mIsEncoder(false), 505 mFatalError(false), 506 mShutdownInProgress(false), 507 mExplicitShutdown(false), 508 mIsLegacyVP9Decoder(false), 509 mEncoderDelay(0), 510 mEncoderPadding(0), 511 mRotationDegrees(0), 512 mChannelMaskPresent(false), 513 mChannelMask(0), 514 mDequeueCounter(0), 515 mInputMetadataType(kMetadataBufferTypeInvalid), 516 mOutputMetadataType(kMetadataBufferTypeInvalid), 517 mLegacyAdaptiveExperiment(false), 518 mMetadataBuffersToSubmit(0), 519 mNumUndequeuedBuffers(0), 520 mRepeatFrameDelayUs(-1ll), 521 mMaxPtsGapUs(-1ll), 522 mMaxFps(-1), 523 mTimePerFrameUs(-1ll), 524 mTimePerCaptureUs(-1ll), 525 mCreateInputBuffersSuspended(false), 526 mTunneled(false), 527 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0), 528 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) { 529 mUninitializedState = new UninitializedState(this); 530 mLoadedState = new LoadedState(this); 531 mLoadedToIdleState = new LoadedToIdleState(this); 532 mIdleToExecutingState = new IdleToExecutingState(this); 533 mExecutingState = new ExecutingState(this); 534 535 mOutputPortSettingsChangedState = 536 new OutputPortSettingsChangedState(this); 537 538 mExecutingToIdleState = new ExecutingToIdleState(this); 539 
mIdleToLoadedState = new IdleToLoadedState(this); 540 mFlushingState = new FlushingState(this); 541 542 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 543 mInputEOSResult = OK; 544 545 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 546 547 changeState(mUninitializedState); 548} 549 550ACodec::~ACodec() { 551} 552 553void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 554 mNotify = msg; 555} 556 557void ACodec::initiateSetup(const sp<AMessage> &msg) { 558 msg->setWhat(kWhatSetup); 559 msg->setTarget(this); 560 msg->post(); 561} 562 563void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 564 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 565 msg->setMessage("params", params); 566 msg->post(); 567} 568 569void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 570 msg->setWhat(kWhatAllocateComponent); 571 msg->setTarget(this); 572 msg->post(); 573} 574 575void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 576 msg->setWhat(kWhatConfigureComponent); 577 msg->setTarget(this); 578 msg->post(); 579} 580 581status_t ACodec::setSurface(const sp<Surface> &surface) { 582 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 583 msg->setObject("surface", surface); 584 585 sp<AMessage> response; 586 status_t err = msg->postAndAwaitResponse(&response); 587 588 if (err == OK) { 589 (void)response->findInt32("err", &err); 590 } 591 return err; 592} 593 594void ACodec::initiateCreateInputSurface() { 595 (new AMessage(kWhatCreateInputSurface, this))->post(); 596} 597 598void ACodec::initiateSetInputSurface( 599 const sp<PersistentSurface> &surface) { 600 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 601 msg->setObject("input-surface", surface); 602 msg->post(); 603} 604 605void ACodec::signalEndOfInputStream() { 606 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 607} 608 609void ACodec::initiateStart() { 610 (new AMessage(kWhatStart, this))->post(); 611} 612 613void 
ACodec::signalFlush() { 614 ALOGV("[%s] signalFlush", mComponentName.c_str()); 615 (new AMessage(kWhatFlush, this))->post(); 616} 617 618void ACodec::signalResume() { 619 (new AMessage(kWhatResume, this))->post(); 620} 621 622void ACodec::initiateShutdown(bool keepComponentAllocated) { 623 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 624 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 625 msg->post(); 626 if (!keepComponentAllocated) { 627 // ensure shutdown completes in 3 seconds 628 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 629 } 630} 631 632void ACodec::signalRequestIDRFrame() { 633 (new AMessage(kWhatRequestIDRFrame, this))->post(); 634} 635 636// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 637// Some codecs may return input buffers before having them processed. 638// This causes a halt if we already signaled an EOS on the input 639// port. For now keep submitting an output buffer if there was an 640// EOS on the input port, but not yet on the output port. 
641void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 642 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 643 mMetadataBuffersToSubmit > 0) { 644 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 645 } 646} 647 648status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 649 // allow keeping unset surface 650 if (surface == NULL) { 651 if (mNativeWindow != NULL) { 652 ALOGW("cannot unset a surface"); 653 return INVALID_OPERATION; 654 } 655 return OK; 656 } 657 658 // cannot switch from bytebuffers to surface 659 if (mNativeWindow == NULL) { 660 ALOGW("component was not configured with a surface"); 661 return INVALID_OPERATION; 662 } 663 664 ANativeWindow *nativeWindow = surface.get(); 665 // if we have not yet started the codec, we can simply set the native window 666 if (mBuffers[kPortIndexInput].size() == 0) { 667 mNativeWindow = surface; 668 return OK; 669 } 670 671 // we do not support changing a tunneled surface after start 672 if (mTunneled) { 673 ALOGW("cannot change tunneled surface"); 674 return INVALID_OPERATION; 675 } 676 677 int usageBits = 0; 678 // no need to reconnect as we will not dequeue all buffers 679 status_t err = setupNativeWindowSizeFormatAndUsage( 680 nativeWindow, &usageBits, 681 !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */); 682 if (err != OK) { 683 return err; 684 } 685 686 int ignoredFlags = kVideoGrallocUsage; 687 // New output surface is not allowed to add new usage flag except ignored ones. 688 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 689 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 690 return BAD_VALUE; 691 } 692 693 // get min undequeued count. We cannot switch to a surface that has a higher 694 // undequeued count than we allocated. 
695 int minUndequeuedBuffers = 0; 696 err = nativeWindow->query( 697 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 698 &minUndequeuedBuffers); 699 if (err != 0) { 700 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 701 strerror(-err), -err); 702 return err; 703 } 704 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 705 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 706 minUndequeuedBuffers, mNumUndequeuedBuffers); 707 return BAD_VALUE; 708 } 709 710 // we cannot change the number of output buffers while OMX is running 711 // set up surface to the same count 712 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 713 ALOGV("setting up surface for %zu buffers", buffers.size()); 714 715 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 716 if (err != 0) { 717 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 718 -err); 719 return err; 720 } 721 722 // need to enable allocation when attaching 723 surface->getIGraphicBufferProducer()->allowAllocation(true); 724 725 // for meta data mode, we move dequeud buffers to the new surface. 
726 // for non-meta mode, we must move all registered buffers 727 for (size_t i = 0; i < buffers.size(); ++i) { 728 const BufferInfo &info = buffers[i]; 729 // skip undequeued buffers for meta data mode 730 if (storingMetadataInDecodedBuffers() 731 && !mLegacyAdaptiveExperiment 732 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 733 ALOGV("skipping buffer"); 734 continue; 735 } 736 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 737 738 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 739 if (err != OK) { 740 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 741 info.mGraphicBuffer->getNativeBuffer(), 742 strerror(-err), -err); 743 return err; 744 } 745 } 746 747 // cancel undequeued buffers to new surface 748 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 749 for (size_t i = 0; i < buffers.size(); ++i) { 750 BufferInfo &info = buffers.editItemAt(i); 751 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 752 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 753 err = nativeWindow->cancelBuffer( 754 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 755 info.mFenceFd = -1; 756 if (err != OK) { 757 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 758 info.mGraphicBuffer->getNativeBuffer(), 759 strerror(-err), -err); 760 return err; 761 } 762 } 763 } 764 // disallow further allocation 765 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 766 } 767 768 // push blank buffers to previous window if requested 769 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 770 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 771 } 772 773 mNativeWindow = nativeWindow; 774 mNativeWindowUsageBits = usageBits; 775 return OK; 776} 777 778status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 779 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 780 781 CHECK(mDealer[portIndex] == 
NULL); 782 CHECK(mBuffers[portIndex].isEmpty()); 783 784 status_t err; 785 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { 786 if (storingMetadataInDecodedBuffers()) { 787 err = allocateOutputMetadataBuffers(); 788 } else { 789 err = allocateOutputBuffersFromNativeWindow(); 790 } 791 } else { 792 OMX_PARAM_PORTDEFINITIONTYPE def; 793 InitOMXParams(&def); 794 def.nPortIndex = portIndex; 795 796 err = mOMX->getParameter( 797 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 798 799 if (err == OK) { 800 MetadataBufferType type = 801 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 802 size_t bufSize = def.nBufferSize; 803 if (type == kMetadataBufferTypeANWBuffer) { 804 bufSize = sizeof(VideoNativeMetadata); 805 } else if (type == kMetadataBufferTypeNativeHandleSource) { 806 bufSize = sizeof(VideoNativeHandleMetadata); 807 } 808 809 // If using gralloc or native source input metadata buffers, allocate largest 810 // metadata size as we prefer to generate native source metadata, but component 811 // may require gralloc source. For camera source, allocate at least enough 812 // size for native metadata buffers. 
813 size_t allottedSize = bufSize; 814 if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) { 815 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 816 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 817 bufSize = max(bufSize, sizeof(VideoNativeMetadata)); 818 } 819 820 size_t conversionBufferSize = 0; 821 822 sp<DataConverter> converter = mConverter[portIndex]; 823 if (converter != NULL) { 824 // here we assume sane conversions of max 4:1, so result fits in int32 825 if (portIndex == kPortIndexInput) { 826 conversionBufferSize = converter->sourceSize(bufSize); 827 } else { 828 conversionBufferSize = converter->targetSize(bufSize); 829 } 830 } 831 832 size_t alignment = MemoryDealer::getAllocationAlignment(); 833 834 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port", 835 mComponentName.c_str(), 836 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 837 portIndex == kPortIndexInput ? 
"input" : "output"); 838 839 // verify buffer sizes to avoid overflow in align() 840 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { 841 ALOGE("b/22885421"); 842 return NO_MEMORY; 843 } 844 845 // don't modify bufSize as OMX may not expect it to increase after negotiation 846 size_t alignedSize = align(bufSize, alignment); 847 size_t alignedConvSize = align(conversionBufferSize, alignment); 848 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { 849 ALOGE("b/22885421"); 850 return NO_MEMORY; 851 } 852 853 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); 854 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 855 856 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 857 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 858 if (mem == NULL || mem->pointer() == NULL) { 859 return NO_MEMORY; 860 } 861 862 BufferInfo info; 863 info.mStatus = BufferInfo::OWNED_BY_US; 864 info.mFenceFd = -1; 865 info.mRenderInfo = NULL; 866 info.mNativeHandle = NULL; 867 868 uint32_t requiresAllocateBufferBit = 869 (portIndex == kPortIndexInput) 870 ? kRequiresAllocateBufferOnInputPorts 871 : kRequiresAllocateBufferOnOutputPorts; 872 873 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 874 mem.clear(); 875 876 void *ptr = NULL; 877 sp<NativeHandle> native_handle; 878 err = mOMX->allocateSecureBuffer( 879 mNode, portIndex, bufSize, &info.mBufferID, 880 &ptr, &native_handle); 881 882 // TRICKY: this representation is unorthodox, but ACodec requires 883 // an ABuffer with a proper size to validate range offsets and lengths. 884 // Since mData is never referenced for secure input, it is used to store 885 // either the pointer to the secure buffer, or the opaque handle as on 886 // some devices ptr is actually an opaque handle, not a pointer. 
887 888 // TRICKY2: use native handle as the base of the ABuffer if received one, 889 // because Widevine source only receives these base addresses. 890 const native_handle_t *native_handle_ptr = 891 native_handle == NULL ? NULL : native_handle->handle(); 892 info.mData = new ABuffer( 893 ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize); 894 info.mNativeHandle = native_handle; 895 info.mCodecData = info.mData; 896 } else if (mQuirks & requiresAllocateBufferBit) { 897 err = mOMX->allocateBufferWithBackup( 898 mNode, portIndex, mem, &info.mBufferID, allottedSize); 899 } else { 900 err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize); 901 } 902 903 if (mem != NULL) { 904 info.mCodecData = new ABuffer(mem->pointer(), bufSize); 905 info.mCodecRef = mem; 906 907 if (type == kMetadataBufferTypeANWBuffer) { 908 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 909 } 910 911 // if we require conversion, allocate conversion buffer for client use; 912 // otherwise, reuse codec buffer 913 if (mConverter[portIndex] != NULL) { 914 CHECK_GT(conversionBufferSize, (size_t)0); 915 mem = mDealer[portIndex]->allocate(conversionBufferSize); 916 if (mem == NULL|| mem->pointer() == NULL) { 917 return NO_MEMORY; 918 } 919 info.mData = new ABuffer(mem->pointer(), conversionBufferSize); 920 info.mMemRef = mem; 921 } else { 922 info.mData = info.mCodecData; 923 info.mMemRef = info.mCodecRef; 924 } 925 } 926 927 mBuffers[portIndex].push(info); 928 } 929 } 930 } 931 932 if (err != OK) { 933 return err; 934 } 935 936 sp<AMessage> notify = mNotify->dup(); 937 notify->setInt32("what", CodecBase::kWhatBuffersAllocated); 938 939 notify->setInt32("portIndex", portIndex); 940 941 sp<PortDescription> desc = new PortDescription; 942 943 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 944 const BufferInfo &info = mBuffers[portIndex][i]; 945 desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef); 946 } 947 948 
    // (tail of ACodec::allocateBuffersOnPort, which begins above this chunk)
    // Post the kWhatBuffersAllocated notification assembled above; "portDesc"
    // carries the buffer IDs, data pointers and memory refs for this port.
    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Configures |nativeWindow| to match the component's current output port
// definition: queries frame size / color format from the OMX component,
// merges the component's gralloc usage bits with our mandatory video usage
// (plus GRALLOC_USAGE_PROTECTED for protected playback), and applies them
// to the window. On success *finalUsage holds the combined usage bits.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */,
        bool reconnect) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    // Reset cached crop/dataspace so the first output-format change is
    // propagated to the window again after a (re)connect.
    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage,
            reconnect);
}

// Negotiates the output buffer count between the OMX component and the
// native window. On success returns (via out-params) the agreed buffer
// count, the per-buffer size, and the number of buffers the window keeps
// undequeued. For tunneled playback no window buffers are allocated and
// all counts are reported as 0.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // The loop retries with progressively fewer extra buffers if the
    // component rejects the requested nBufferCountActual.
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Allocates real (non-metadata) output buffers by dequeuing graphic buffers
// from the native window and registering each with the OMX component via
// useGraphicBuffer(). The window's required minimum of undequeued buffers
// is cancelled back to it at the end; on error, every buffer we still own
// is cancelled instead.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    // Allow the producer to allocate while we preregister, then lock it back
    // down below so the buffer set stays fixed (non-metadata mode only).
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mCodecData = info.mData;
        info.mGraphicBuffer = graphicBuffer;
        // NOTE: pushed before registration succeeds so the error path below
        // can cancel it back to the window.
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            if (err == 0) {
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}

// Allocates small metadata buffers (VideoNativeMetadata or
// VideoGrallocMetadata, depending on mOutputMetadataType) for the output
// port. In the legacy-adaptive experiment, additionally dequeues and
// immediately cancels all window buffers so they are preregistered with
// the component via updateGraphicBufferInMeta.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers,
            mLegacyAdaptiveExperiment /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // -1 marks "no fence" in the ANW metadata struct.
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new ABuffer(mem->pointer(), mem->size());
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        // we use useBuffer for metadata regardless of quirks
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMX->updateGraphicBufferInMeta(
                    mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                if (err == OK) {
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Dequeues one graphic buffer from the native window and hands the
// corresponding metadata buffer to the component via fillBuffer().
// Decrements mMetadataBuffersToSubmit; no-op once it reaches zero.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());
    if (mMetadataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetadataBuffersToSubmit;
    info->checkWriteFence("submitOutputMetadataBuffer");
    // The fence fd is transferred to the component along with the buffer.
    status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
    info->mFenceFd = -1;
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    return err;
}

// Blocks (up to IOMX::kFenceTimeoutMs) until the fence |fd| signals.
// |dbg| identifies the call site in the timeout warning. A negative fd
// means "no fence" and returns OK immediately. Note: the Fence object
// takes ownership of fd.
status_t ACodec::waitForFence(int fd, const char *dbg) {
    status_t res = OK;
    if (fd >= 0) {
        sp<Fence> fence = new Fence(fd);
        res = fence->wait(IOMX::kFenceTimeoutMs);
        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    }
    return res;
}

// static
// Human-readable name for a BufferInfo ownership state (used in dumps/logs).
const char *ACodec::_asString(BufferInfo::Status s) {
    switch (s) {
        case BufferInfo::OWNED_BY_US: return "OUR";
        case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
        case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
        case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
        case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
        default: return "?";
    }
}

// Logs (at INFO level) every buffer on the given port with its ID, graphic
// buffer pointers, ownership state and dequeue generation.
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                i, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
    }
}

// Returns a buffer we own back to the native window via cancelBuffer,
// transferring the write fence with it. Ownership becomes
// OWNED_BY_NATIVE_WINDOW even if cancelBuffer fails.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    // cancelBuffer consumes the fence fd.
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Registers a freshly-dequeued buffer with the render tracker (keyed by the
// buffer's slot index in mBuffers) and flushes any frames whose fences have
// already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Callback for a frame-rendered event; dumps the render queue if the
// tracker cannot match the timestamp.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}

// Collects frames whose render fences have signaled (up to |until|, if
// given), unlinks their render info from our buffer table, and posts a
// kWhatOutputFramesRendered message when there is anything to report.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}

// Dequeues a buffer from the native window and maps it back to our
// BufferInfo table. Known buffers in the OWNED_BY_NATIVE_WINDOW state are
// returned directly; stale/unrecognized buffers are discarded and the
// dequeue retried. In metadata mode an unknown-but-valid buffer replaces
// the least-recently-dequeued entry (LRU) via updateGraphicBufferInMeta.
// Returns NULL on failure, in tunneled mode, or after a fatal error.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
              " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used.  We can simply ignore this as we have already dequeued
                // this buffer properly.  NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible to dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    mOMX->updateGraphicBufferInMeta(
            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
            oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer on the port (iterating backwards since freeBuffer
// removes entries) and releases the port's memory dealer. Returns the
// first error encountered but keeps freeing.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}

// Frees output buffers not currently held by the component or a downstream
// consumer; buffers still in flight are left alone.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t err = OK;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            status_t err2 = freeBuffer(kPortIndexOutput, i);
            if (err == OK) {
                err = err2;
            }
        }
    }

    return err;
}

// Frees the i-th buffer on |portIndex|: cancels it back to the native
// window if we still own it, releases it from the OMX component, closes
// any pending fence, untracks render info, and removes the table entry
// (even when mOMX->freeBuffer fails).
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMX->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}

// Linear search for the BufferInfo with the given OMX buffer ID on
// |portIndex|. Optionally reports the slot index via |index|; returns
// NULL (with an error log) if not found.
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info =
            &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    ALOGE("Could not find buffer with ID %u", bufferID);
    return NULL;
}

// Maps the mime type to the standard OMX component role (e.g.
// "video_decoder.avc") and applies it to the component. Returns BAD_VALUE
// for an unknown mime type; other failures are logged and propagated.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    const char *role = GetComponentRole(isEncoder, mime);
    if (role == NULL) {
        return BAD_VALUE;
    }
    status_t err = SetComponentRole(mOMX, mNode, role);
    if (err != OK) {
        ALOGW("[%s] Failed to set standard component role '%s'.",
                mComponentName.c_str(), role);
    }
    return err;
}

// Configures the component from the client-supplied format message:
// role, bitrate, metadata modes, SPS/PPS prepending, native window setup,
// and per-mime codec parameters. (Continues below this chunk.)
status_t ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    int32_t encoder;
    if (!msg->findInt32("encoder", &encoder)) {
        encoder = false;
    }

    sp<AMessage> inputFormat = new AMessage;
    sp<AMessage> outputFormat = new AMessage;
    mConfigFormat = msg;

    mIsEncoder = encoder;

    mInputMetadataType = kMetadataBufferTypeInvalid;
    mOutputMetadataType = kMetadataBufferTypeInvalid;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);

    if (err != OK) {
        return err;
    }

    int32_t bitRate = 0;
    // FLAC encoder doesn't need a bitrate, other encoders do
    if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
            && !msg->findInt32("bitrate", &bitRate)) {
        return INVALID_OPERATION;
    }

    // propagate bitrate to the output so that the muxer has it
    if (encoder && msg->findInt32("bitrate", &bitRate)) {
        // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
        // average bitrate. We've been setting both bitrate and max-bitrate to this same value.
1656 outputFormat->setInt32("bitrate", bitRate); 1657 outputFormat->setInt32("max-bitrate", bitRate); 1658 } 1659 1660 int32_t storeMeta; 1661 if (encoder 1662 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1663 && storeMeta != kMetadataBufferTypeInvalid) { 1664 mInputMetadataType = (MetadataBufferType)storeMeta; 1665 err = mOMX->storeMetaDataInBuffers( 1666 mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1667 if (err != OK) { 1668 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1669 mComponentName.c_str(), err); 1670 1671 return err; 1672 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1673 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1674 // IOMX translates ANWBuffers to gralloc source already. 1675 mInputMetadataType = (MetadataBufferType)storeMeta; 1676 } 1677 1678 uint32_t usageBits; 1679 if (mOMX->getParameter( 1680 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1681 &usageBits, sizeof(usageBits)) == OK) { 1682 inputFormat->setInt32( 1683 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1684 } 1685 } 1686 1687 int32_t prependSPSPPS = 0; 1688 if (encoder 1689 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1690 && prependSPSPPS != 0) { 1691 OMX_INDEXTYPE index; 1692 err = mOMX->getExtensionIndex( 1693 mNode, 1694 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1695 &index); 1696 1697 if (err == OK) { 1698 PrependSPSPPSToIDRFramesParams params; 1699 InitOMXParams(¶ms); 1700 params.bEnable = OMX_TRUE; 1701 1702 err = mOMX->setParameter( 1703 mNode, index, ¶ms, sizeof(params)); 1704 } 1705 1706 if (err != OK) { 1707 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1708 "IDR frames. 
(err %d)", err); 1709 1710 return err; 1711 } 1712 } 1713 1714 // Only enable metadata mode on encoder output if encoder can prepend 1715 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1716 // opaque handle, to which we don't have access. 1717 int32_t video = !strncasecmp(mime, "video/", 6); 1718 mIsVideo = video; 1719 if (encoder && video) { 1720 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1721 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1722 && storeMeta != 0); 1723 1724 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1725 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1726 if (err != OK) { 1727 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1728 mComponentName.c_str(), err); 1729 } 1730 1731 if (!msg->findInt64( 1732 "repeat-previous-frame-after", 1733 &mRepeatFrameDelayUs)) { 1734 mRepeatFrameDelayUs = -1ll; 1735 } 1736 1737 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1738 mMaxPtsGapUs = -1ll; 1739 } 1740 1741 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1742 mMaxFps = -1; 1743 } 1744 1745 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1746 mTimePerCaptureUs = -1ll; 1747 } 1748 1749 if (!msg->findInt32( 1750 "create-input-buffers-suspended", 1751 (int32_t*)&mCreateInputBuffersSuspended)) { 1752 mCreateInputBuffersSuspended = false; 1753 } 1754 } 1755 1756 // NOTE: we only use native window for video decoders 1757 sp<RefBase> obj; 1758 bool haveNativeWindow = msg->findObject("native-window", &obj) 1759 && obj != NULL && video && !encoder; 1760 mUsingNativeWindow = haveNativeWindow; 1761 mLegacyAdaptiveExperiment = false; 1762 if (video && !encoder) { 1763 inputFormat->setInt32("adaptive-playback", false); 1764 1765 int32_t usageProtected; 1766 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1767 if (!haveNativeWindow) { 1768 ALOGE("protected output buffers must be sent to an 
ANativeWindow"); 1769 return PERMISSION_DENIED; 1770 } 1771 mFlags |= kFlagIsGrallocUsageProtected; 1772 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1773 } 1774 1775 if (mFlags & kFlagIsSecure) { 1776 // use native_handles for secure input buffers 1777 err = mOMX->enableNativeBuffers( 1778 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1779 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1780 err = OK; // ignore error for now 1781 } 1782 } 1783 if (haveNativeWindow) { 1784 sp<ANativeWindow> nativeWindow = 1785 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1786 1787 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1788 int32_t autoFrc; 1789 if (msg->findInt32("auto-frc", &autoFrc)) { 1790 bool enabled = autoFrc; 1791 OMX_CONFIG_BOOLEANTYPE config; 1792 InitOMXParams(&config); 1793 config.bEnabled = (OMX_BOOL)enabled; 1794 status_t temp = mOMX->setConfig( 1795 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1796 &config, sizeof(config)); 1797 if (temp == OK) { 1798 outputFormat->setInt32("auto-frc", enabled); 1799 } else if (enabled) { 1800 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1801 } 1802 } 1803 // END of temporary support for automatic FRC 1804 1805 int32_t tunneled; 1806 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1807 tunneled != 0) { 1808 ALOGI("Configuring TUNNELED video playback."); 1809 mTunneled = true; 1810 1811 int32_t audioHwSync = 0; 1812 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1813 ALOGW("No Audio HW Sync provided for video tunnel"); 1814 } 1815 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1816 if (err != OK) { 1817 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1818 audioHwSync, nativeWindow.get()); 1819 return err; 1820 } 1821 1822 int32_t maxWidth = 0, maxHeight = 0; 1823 if (msg->findInt32("max-width", &maxWidth) && 1824 msg->findInt32("max-height", &maxHeight)) { 1825 
1826 err = mOMX->prepareForAdaptivePlayback( 1827 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1828 if (err != OK) { 1829 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1830 mComponentName.c_str(), err); 1831 // allow failure 1832 err = OK; 1833 } else { 1834 inputFormat->setInt32("max-width", maxWidth); 1835 inputFormat->setInt32("max-height", maxHeight); 1836 inputFormat->setInt32("adaptive-playback", true); 1837 } 1838 } 1839 } else { 1840 ALOGV("Configuring CPU controlled video playback."); 1841 mTunneled = false; 1842 1843 // Explicity reset the sideband handle of the window for 1844 // non-tunneled video in case the window was previously used 1845 // for a tunneled video playback. 1846 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1847 if (err != OK) { 1848 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1849 return err; 1850 } 1851 1852 // Always try to enable dynamic output buffers on native surface 1853 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1854 err = mOMX->storeMetaDataInBuffers( 1855 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1856 if (err != OK) { 1857 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1858 mComponentName.c_str(), err); 1859 1860 // if adaptive playback has been requested, try JB fallback 1861 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1862 // LARGE MEMORY REQUIREMENT 1863 1864 // we will not do adaptive playback on software accessed 1865 // surfaces as they never had to respond to changes in the 1866 // crop window, and we don't trust that they will be able to. 
1867 int usageBits = 0; 1868 bool canDoAdaptivePlayback; 1869 1870 if (nativeWindow->query( 1871 nativeWindow.get(), 1872 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1873 &usageBits) != OK) { 1874 canDoAdaptivePlayback = false; 1875 } else { 1876 canDoAdaptivePlayback = 1877 (usageBits & 1878 (GRALLOC_USAGE_SW_READ_MASK | 1879 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1880 } 1881 1882 int32_t maxWidth = 0, maxHeight = 0; 1883 if (canDoAdaptivePlayback && 1884 msg->findInt32("max-width", &maxWidth) && 1885 msg->findInt32("max-height", &maxHeight)) { 1886 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1887 mComponentName.c_str(), maxWidth, maxHeight); 1888 1889 err = mOMX->prepareForAdaptivePlayback( 1890 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1891 maxHeight); 1892 ALOGW_IF(err != OK, 1893 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1894 mComponentName.c_str(), err); 1895 1896 if (err == OK) { 1897 inputFormat->setInt32("max-width", maxWidth); 1898 inputFormat->setInt32("max-height", maxHeight); 1899 inputFormat->setInt32("adaptive-playback", true); 1900 } 1901 } 1902 // allow failure 1903 err = OK; 1904 } else { 1905 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1906 mComponentName.c_str()); 1907 CHECK(storingMetadataInDecodedBuffers()); 1908 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1909 "legacy-adaptive", !msg->contains("no-experiments")); 1910 1911 inputFormat->setInt32("adaptive-playback", true); 1912 } 1913 1914 int32_t push; 1915 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1916 && push != 0) { 1917 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1918 } 1919 } 1920 1921 int32_t rotationDegrees; 1922 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1923 mRotationDegrees = rotationDegrees; 1924 } else { 1925 mRotationDegrees = 0; 1926 } 1927 } 1928 1929 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 1930 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 1931 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 1932 1933 if (video) { 1934 // determine need for software renderer 1935 bool usingSwRenderer = false; 1936 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1937 usingSwRenderer = true; 1938 haveNativeWindow = false; 1939 } 1940 1941 if (encoder) { 1942 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 1943 } else { 1944 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 1945 } 1946 1947 if (err != OK) { 1948 return err; 1949 } 1950 1951 if (haveNativeWindow) { 1952 mNativeWindow = static_cast<Surface *>(obj.get()); 1953 } 1954 1955 // initialize native window now to get actual output format 1956 // TODO: this is needed for some encoders even though they don't use native window 1957 err = initNativeWindow(); 1958 if (err != OK) { 1959 return err; 1960 } 1961 1962 // fallback for devices that do not handle flex-YUV for native buffers 1963 if (haveNativeWindow) { 1964 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 1965 if (msg->findInt32("color-format", &requestedColorFormat) && 1966 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 1967 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 1968 if (err != OK) { 1969 return err; 1970 } 1971 int32_t colorFormat = OMX_COLOR_FormatUnused; 1972 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 1973 if (!outputFormat->findInt32("color-format", &colorFormat)) { 1974 ALOGE("ouptut port did not have a color format (wrong domain?)"); 1975 return BAD_VALUE; 1976 } 1977 ALOGD("[%s] Requested output format %#x and got %#x.", 1978 mComponentName.c_str(), requestedColorFormat, colorFormat); 1979 if (!IsFlexibleColorFormat( 1980 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 1981 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 1982 // device did not handle flex-YUV request for native window, fall back 1983 // to SW renderer 1984 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 1985 mNativeWindow.clear(); 1986 mNativeWindowUsageBits = 0; 1987 haveNativeWindow = false; 1988 usingSwRenderer = true; 1989 if (storingMetadataInDecodedBuffers()) { 1990 err = mOMX->storeMetaDataInBuffers( 1991 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 1992 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 1993 // TODO: implement adaptive-playback support for bytebuffer mode. 1994 // This is done by SW codecs, but most HW codecs don't support it. 1995 inputFormat->setInt32("adaptive-playback", false); 1996 } 1997 if (err == OK) { 1998 err = mOMX->enableNativeBuffers( 1999 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2000 } 2001 if (mFlags & kFlagIsGrallocUsageProtected) { 2002 // fallback is not supported for protected playback 2003 err = PERMISSION_DENIED; 2004 } else if (err == OK) { 2005 err = setupVideoDecoder( 2006 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2007 } 2008 } 2009 } 2010 } 2011 2012 if (usingSwRenderer) { 2013 outputFormat->setInt32("using-sw-renderer", 1); 2014 } 2015 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2016 int32_t numChannels, sampleRate; 2017 if (!msg->findInt32("channel-count", &numChannels) 2018 || !msg->findInt32("sample-rate", &sampleRate)) { 2019 // Since we did not always check for these, leave them optional 2020 // and have the decoder figure it all out. 2021 err = OK; 2022 } else { 2023 err = setupRawAudioFormat( 2024 encoder ? 
kPortIndexInput : kPortIndexOutput, 2025 sampleRate, 2026 numChannels); 2027 } 2028 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2029 int32_t numChannels, sampleRate; 2030 if (!msg->findInt32("channel-count", &numChannels) 2031 || !msg->findInt32("sample-rate", &sampleRate)) { 2032 err = INVALID_OPERATION; 2033 } else { 2034 int32_t isADTS, aacProfile; 2035 int32_t sbrMode; 2036 int32_t maxOutputChannelCount; 2037 int32_t pcmLimiterEnable; 2038 drcParams_t drc; 2039 if (!msg->findInt32("is-adts", &isADTS)) { 2040 isADTS = 0; 2041 } 2042 if (!msg->findInt32("aac-profile", &aacProfile)) { 2043 aacProfile = OMX_AUDIO_AACObjectNull; 2044 } 2045 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2046 sbrMode = -1; 2047 } 2048 2049 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2050 maxOutputChannelCount = -1; 2051 } 2052 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2053 // value is unknown 2054 pcmLimiterEnable = -1; 2055 } 2056 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2057 // value is unknown 2058 drc.encodedTargetLevel = -1; 2059 } 2060 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2061 // value is unknown 2062 drc.drcCut = -1; 2063 } 2064 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2065 // value is unknown 2066 drc.drcBoost = -1; 2067 } 2068 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2069 // value is unknown 2070 drc.heavyCompression = -1; 2071 } 2072 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2073 // value is unknown 2074 drc.targetRefLevel = -1; 2075 } 2076 2077 err = setupAACCodec( 2078 encoder, numChannels, sampleRate, bitRate, aacProfile, 2079 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2080 pcmLimiterEnable); 2081 } 2082 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2083 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2084 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2085 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2086 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2087 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2088 // These are PCM-like formats with a fixed sample rate but 2089 // a variable number of channels. 2090 2091 int32_t numChannels; 2092 if (!msg->findInt32("channel-count", &numChannels)) { 2093 err = INVALID_OPERATION; 2094 } else { 2095 int32_t sampleRate; 2096 if (!msg->findInt32("sample-rate", &sampleRate)) { 2097 sampleRate = 8000; 2098 } 2099 err = setupG711Codec(encoder, sampleRate, numChannels); 2100 } 2101 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2102 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2103 if (encoder && 2104 (!msg->findInt32("channel-count", &numChannels) 2105 || !msg->findInt32("sample-rate", &sampleRate))) { 2106 ALOGE("missing channel count or sample rate for FLAC encoder"); 2107 err = INVALID_OPERATION; 2108 } else { 2109 if (encoder) { 2110 if (!msg->findInt32( 2111 "complexity", &compressionLevel) && 2112 !msg->findInt32( 2113 "flac-compression-level", &compressionLevel)) { 2114 compressionLevel = 5; // default FLAC compression level 2115 } else if (compressionLevel < 0) { 2116 ALOGW("compression level %d outside [0..8] range, " 2117 "using 0", 2118 compressionLevel); 2119 compressionLevel = 0; 2120 } else if (compressionLevel > 8) { 2121 ALOGW("compression level %d outside [0..8] range, " 2122 "using 8", 2123 compressionLevel); 2124 compressionLevel = 8; 2125 } 2126 } 2127 err = setupFlacCodec( 2128 encoder, numChannels, sampleRate, compressionLevel); 2129 } 2130 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2131 int32_t numChannels, sampleRate; 2132 if (encoder 2133 || !msg->findInt32("channel-count", &numChannels) 2134 || !msg->findInt32("sample-rate", &sampleRate)) { 2135 err = INVALID_OPERATION; 2136 } else { 2137 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2138 } 2139 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2140 int32_t numChannels; 2141 int32_t sampleRate; 2142 if (!msg->findInt32("channel-count", &numChannels) 2143 || !msg->findInt32("sample-rate", &sampleRate)) { 2144 err = INVALID_OPERATION; 2145 } else { 2146 err = setupAC3Codec(encoder, numChannels, sampleRate); 2147 } 2148 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2149 int32_t numChannels; 2150 int32_t sampleRate; 2151 if (!msg->findInt32("channel-count", &numChannels) 2152 || !msg->findInt32("sample-rate", &sampleRate)) { 2153 err = INVALID_OPERATION; 2154 } else { 2155 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2156 } 2157 } 2158 2159 if (err != OK) { 2160 return err; 2161 } 2162 2163 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2164 mEncoderDelay = 0; 2165 } 2166 2167 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2168 mEncoderPadding = 0; 2169 } 2170 2171 if (msg->findInt32("channel-mask", &mChannelMask)) { 2172 mChannelMaskPresent = true; 2173 } else { 2174 mChannelMaskPresent = false; 2175 } 2176 2177 int32_t maxInputSize; 2178 if (msg->findInt32("max-input-size", &maxInputSize)) { 2179 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2180 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2181 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2182 } 2183 2184 int32_t priority; 2185 if (msg->findInt32("priority", &priority)) { 2186 err = setPriority(priority); 2187 } 2188 2189 int32_t rateInt = -1; 2190 float rateFloat = -1; 2191 if (!msg->findFloat("operating-rate", &rateFloat)) { 2192 msg->findInt32("operating-rate", &rateInt); 2193 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
2194 } 2195 if (rateFloat > 0) { 2196 err = setOperatingRate(rateFloat, video); 2197 } 2198 2199 // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame. 2200 mBaseOutputFormat = outputFormat; 2201 // trigger a kWhatOutputFormatChanged msg on first buffer 2202 mLastOutputFormat.clear(); 2203 2204 err = getPortFormat(kPortIndexInput, inputFormat); 2205 if (err == OK) { 2206 err = getPortFormat(kPortIndexOutput, outputFormat); 2207 if (err == OK) { 2208 mInputFormat = inputFormat; 2209 mOutputFormat = outputFormat; 2210 } 2211 } 2212 2213 // create data converters if needed 2214 if (!video && err == OK) { 2215 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 2216 if (encoder) { 2217 (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2218 mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding); 2219 if (mConverter[kPortIndexInput] != NULL) { 2220 mInputFormat->setInt32("pcm-encoding", pcmEncoding); 2221 } 2222 } else { 2223 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2224 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 2225 if (mConverter[kPortIndexOutput] != NULL) { 2226 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 2227 } 2228 } 2229 } 2230 2231 return err; 2232} 2233 2234status_t ACodec::setPriority(int32_t priority) { 2235 if (priority < 0) { 2236 return BAD_VALUE; 2237 } 2238 OMX_PARAM_U32TYPE config; 2239 InitOMXParams(&config); 2240 config.nU32 = (OMX_U32)priority; 2241 status_t temp = mOMX->setConfig( 2242 mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2243 &config, sizeof(config)); 2244 if (temp != OK) { 2245 ALOGI("codec does not support config priority (err %d)", temp); 2246 } 2247 return OK; 2248} 2249 2250status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2251 if (rateFloat < 0) { 2252 return BAD_VALUE; 2253 } 2254 OMX_U32 rate; 2255 if (isVideo) { 2256 if (rateFloat > 
65535) { 2257 return BAD_VALUE; 2258 } 2259 rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f); 2260 } else { 2261 if (rateFloat > UINT_MAX) { 2262 return BAD_VALUE; 2263 } 2264 rate = (OMX_U32)(rateFloat); 2265 } 2266 OMX_PARAM_U32TYPE config; 2267 InitOMXParams(&config); 2268 config.nU32 = rate; 2269 status_t err = mOMX->setConfig( 2270 mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, 2271 &config, sizeof(config)); 2272 if (err != OK) { 2273 ALOGI("codec does not support config operating rate (err %d)", err); 2274 } 2275 return OK; 2276} 2277 2278status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) { 2279 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2280 InitOMXParams(¶ms); 2281 params.nPortIndex = kPortIndexOutput; 2282 status_t err = mOMX->getConfig( 2283 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2284 if (err == OK) { 2285 *intraRefreshPeriod = params.nRefreshPeriod; 2286 return OK; 2287 } 2288 2289 // Fallback to query through standard OMX index. 
2290 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2291 InitOMXParams(&refreshParams); 2292 refreshParams.nPortIndex = kPortIndexOutput; 2293 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2294 err = mOMX->getParameter( 2295 mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams)); 2296 if (err != OK || refreshParams.nCirMBs == 0) { 2297 *intraRefreshPeriod = 0; 2298 return OK; 2299 } 2300 2301 // Calculate period based on width and height 2302 uint32_t width, height; 2303 OMX_PARAM_PORTDEFINITIONTYPE def; 2304 InitOMXParams(&def); 2305 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2306 def.nPortIndex = kPortIndexOutput; 2307 err = mOMX->getParameter( 2308 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2309 if (err != OK) { 2310 *intraRefreshPeriod = 0; 2311 return err; 2312 } 2313 width = video_def->nFrameWidth; 2314 height = video_def->nFrameHeight; 2315 // Use H.264/AVC MacroBlock size 16x16 2316 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs); 2317 2318 return OK; 2319} 2320 2321status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) { 2322 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2323 InitOMXParams(¶ms); 2324 params.nPortIndex = kPortIndexOutput; 2325 params.nRefreshPeriod = intraRefreshPeriod; 2326 status_t err = mOMX->setConfig( 2327 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2328 if (err == OK) { 2329 return OK; 2330 } 2331 2332 // Only in configure state, a component could invoke setParameter. 
2333 if (!inConfigure) { 2334 return INVALID_OPERATION; 2335 } else { 2336 ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str()); 2337 } 2338 2339 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2340 InitOMXParams(&refreshParams); 2341 refreshParams.nPortIndex = kPortIndexOutput; 2342 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2343 2344 if (intraRefreshPeriod == 0) { 2345 // 0 means disable intra refresh. 2346 refreshParams.nCirMBs = 0; 2347 } else { 2348 // Calculate macroblocks that need to be intra coded base on width and height 2349 uint32_t width, height; 2350 OMX_PARAM_PORTDEFINITIONTYPE def; 2351 InitOMXParams(&def); 2352 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2353 def.nPortIndex = kPortIndexOutput; 2354 err = mOMX->getParameter( 2355 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2356 if (err != OK) { 2357 return err; 2358 } 2359 width = video_def->nFrameWidth; 2360 height = video_def->nFrameHeight; 2361 // Use H.264/AVC MacroBlock size 16x16 2362 refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod); 2363 } 2364 2365 err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh, 2366 &refreshParams, sizeof(refreshParams)); 2367 if (err != OK) { 2368 return err; 2369 } 2370 2371 return OK; 2372} 2373 2374status_t ACodec::configureTemporalLayers( 2375 const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) { 2376 if (!mIsVideo || !mIsEncoder) { 2377 return INVALID_OPERATION; 2378 } 2379 2380 AString tsSchema; 2381 if (!msg->findString("ts-schema", &tsSchema)) { 2382 return OK; 2383 } 2384 2385 unsigned int numLayers = 0; 2386 unsigned int numBLayers = 0; 2387 int tags; 2388 char dummy; 2389 OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern = 2390 OMX_VIDEO_AndroidTemporalLayeringPatternNone; 2391 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1 2392 && numLayers > 0) { 2393 pattern = 
OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC; 2394 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c", 2395 &numLayers, &dummy, &numBLayers, &dummy)) 2396 && (tags == 1 || (tags == 3 && dummy == '+')) 2397 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 2398 numLayers += numBLayers; 2399 pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid; 2400 } else { 2401 ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 2402 return BAD_VALUE; 2403 } 2404 2405 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams; 2406 InitOMXParams(&layerParams); 2407 layerParams.nPortIndex = kPortIndexOutput; 2408 2409 status_t err = mOMX->getParameter( 2410 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2411 &layerParams, sizeof(layerParams)); 2412 2413 if (err != OK) { 2414 return err; 2415 } else if (!(layerParams.eSupportedPatterns & pattern)) { 2416 return BAD_VALUE; 2417 } 2418 2419 numLayers = min(numLayers, layerParams.nLayerCountMax); 2420 numBLayers = min(numBLayers, layerParams.nBLayerCountMax); 2421 2422 if (!inConfigure) { 2423 OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig; 2424 InitOMXParams(&layerConfig); 2425 layerConfig.nPortIndex = kPortIndexOutput; 2426 layerConfig.ePattern = pattern; 2427 layerConfig.nPLayerCountActual = numLayers - numBLayers; 2428 layerConfig.nBLayerCountActual = numBLayers; 2429 layerConfig.bBitrateRatiosSpecified = OMX_FALSE; 2430 2431 err = mOMX->setConfig( 2432 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering, 2433 &layerConfig, sizeof(layerConfig)); 2434 } else { 2435 layerParams.ePattern = pattern; 2436 layerParams.nPLayerCountActual = numLayers - numBLayers; 2437 layerParams.nBLayerCountActual = numBLayers; 2438 layerParams.bBitrateRatiosSpecified = OMX_FALSE; 2439 2440 err = mOMX->setParameter( 2441 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2442 &layerParams, sizeof(layerParams)); 2443 } 2444 2445 AString configSchema; 
2446 if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) { 2447 configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers); 2448 } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) { 2449 configSchema = AStringPrintf("webrtc.vp8.%u", numLayers); 2450 } 2451 2452 if (err != OK) { 2453 ALOGW("Failed to set temporal layers to %s (requested %s)", 2454 configSchema.c_str(), tsSchema.c_str()); 2455 return err; 2456 } 2457 2458 err = mOMX->getParameter( 2459 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2460 &layerParams, sizeof(layerParams)); 2461 2462 if (err == OK) { 2463 ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)", 2464 tsSchema.c_str(), configSchema.c_str(), 2465 asString(layerParams.ePattern), layerParams.ePattern, 2466 layerParams.nPLayerCountActual, layerParams.nBLayerCountActual); 2467 2468 if (outputFormat.get() == mOutputFormat.get()) { 2469 mOutputFormat = mOutputFormat->dup(); // trigger an output format change event 2470 } 2471 // assume we got what we configured 2472 outputFormat->setString("ts-schema", configSchema); 2473 } 2474 return err; 2475} 2476 2477status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { 2478 OMX_PARAM_PORTDEFINITIONTYPE def; 2479 InitOMXParams(&def); 2480 def.nPortIndex = portIndex; 2481 2482 status_t err = mOMX->getParameter( 2483 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2484 2485 if (err != OK) { 2486 return err; 2487 } 2488 2489 if (def.nBufferSize >= size) { 2490 return OK; 2491 } 2492 2493 def.nBufferSize = size; 2494 2495 err = mOMX->setParameter( 2496 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2497 2498 if (err != OK) { 2499 return err; 2500 } 2501 2502 err = mOMX->getParameter( 2503 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2504 2505 if (err != OK) { 2506 return err; 2507 } 2508 2509 if (def.nBufferSize < size) { 2510 ALOGE("failed to set min buffer size to 
%zu (is still %u)", size, def.nBufferSize); 2511 return FAILED_TRANSACTION; 2512 } 2513 2514 return OK; 2515} 2516 2517status_t ACodec::selectAudioPortFormat( 2518 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 2519 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 2520 InitOMXParams(&format); 2521 2522 format.nPortIndex = portIndex; 2523 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2524 format.nIndex = index; 2525 status_t err = mOMX->getParameter( 2526 mNode, OMX_IndexParamAudioPortFormat, 2527 &format, sizeof(format)); 2528 2529 if (err != OK) { 2530 return err; 2531 } 2532 2533 if (format.eEncoding == desiredFormat) { 2534 break; 2535 } 2536 2537 if (index == kMaxIndicesToCheck) { 2538 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 2539 mComponentName.c_str(), index, 2540 asString(format.eEncoding), format.eEncoding); 2541 return ERROR_UNSUPPORTED; 2542 } 2543 } 2544 2545 return mOMX->setParameter( 2546 mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2547} 2548 2549status_t ACodec::setupAACCodec( 2550 bool encoder, int32_t numChannels, int32_t sampleRate, 2551 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 2552 int32_t maxOutputChannelCount, const drcParams_t& drc, 2553 int32_t pcmLimiterEnable) { 2554 if (encoder && isADTS) { 2555 return -EINVAL; 2556 } 2557 2558 status_t err = setupRawAudioFormat( 2559 encoder ? 
kPortIndexInput : kPortIndexOutput, 2560 sampleRate, 2561 numChannels); 2562 2563 if (err != OK) { 2564 return err; 2565 } 2566 2567 if (encoder) { 2568 err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC); 2569 2570 if (err != OK) { 2571 return err; 2572 } 2573 2574 OMX_PARAM_PORTDEFINITIONTYPE def; 2575 InitOMXParams(&def); 2576 def.nPortIndex = kPortIndexOutput; 2577 2578 err = mOMX->getParameter( 2579 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2580 2581 if (err != OK) { 2582 return err; 2583 } 2584 2585 def.format.audio.bFlagErrorConcealment = OMX_TRUE; 2586 def.format.audio.eEncoding = OMX_AUDIO_CodingAAC; 2587 2588 err = mOMX->setParameter( 2589 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2590 2591 if (err != OK) { 2592 return err; 2593 } 2594 2595 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2596 InitOMXParams(&profile); 2597 profile.nPortIndex = kPortIndexOutput; 2598 2599 err = mOMX->getParameter( 2600 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2601 2602 if (err != OK) { 2603 return err; 2604 } 2605 2606 profile.nChannels = numChannels; 2607 2608 profile.eChannelMode = 2609 (numChannels == 1) 2610 ? 
OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo; 2611 2612 profile.nSampleRate = sampleRate; 2613 profile.nBitRate = bitRate; 2614 profile.nAudioBandWidth = 0; 2615 profile.nFrameLength = 0; 2616 profile.nAACtools = OMX_AUDIO_AACToolAll; 2617 profile.nAACERtools = OMX_AUDIO_AACERNone; 2618 profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile; 2619 profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF; 2620 switch (sbrMode) { 2621 case 0: 2622 // disable sbr 2623 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2624 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2625 break; 2626 case 1: 2627 // enable single-rate sbr 2628 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2629 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2630 break; 2631 case 2: 2632 // enable dual-rate sbr 2633 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2634 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2635 break; 2636 case -1: 2637 // enable both modes -> the codec will decide which mode should be used 2638 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2639 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2640 break; 2641 default: 2642 // unsupported sbr mode 2643 return BAD_VALUE; 2644 } 2645 2646 2647 err = mOMX->setParameter( 2648 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2649 2650 if (err != OK) { 2651 return err; 2652 } 2653 2654 return err; 2655 } 2656 2657 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2658 InitOMXParams(&profile); 2659 profile.nPortIndex = kPortIndexInput; 2660 2661 err = mOMX->getParameter( 2662 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2663 2664 if (err != OK) { 2665 return err; 2666 } 2667 2668 profile.nChannels = numChannels; 2669 profile.nSampleRate = sampleRate; 2670 2671 profile.eAACStreamFormat = 2672 isADTS 2673 ? 
OMX_AUDIO_AACStreamFormatMP4ADTS 2674 : OMX_AUDIO_AACStreamFormatMP4FF; 2675 2676 OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation; 2677 InitOMXParams(&presentation); 2678 presentation.nMaxOutputChannels = maxOutputChannelCount; 2679 presentation.nDrcCut = drc.drcCut; 2680 presentation.nDrcBoost = drc.drcBoost; 2681 presentation.nHeavyCompression = drc.heavyCompression; 2682 presentation.nTargetReferenceLevel = drc.targetRefLevel; 2683 presentation.nEncodedTargetLevel = drc.encodedTargetLevel; 2684 presentation.nPCMLimiterEnable = pcmLimiterEnable; 2685 2686 status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2687 if (res == OK) { 2688 // optional parameters, will not cause configuration failure 2689 mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation, 2690 &presentation, sizeof(presentation)); 2691 } else { 2692 ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res); 2693 } 2694 mSampleRate = sampleRate; 2695 return res; 2696} 2697 2698status_t ACodec::setupAC3Codec( 2699 bool encoder, int32_t numChannels, int32_t sampleRate) { 2700 status_t err = setupRawAudioFormat( 2701 encoder ? 
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2702 2703 if (err != OK) { 2704 return err; 2705 } 2706 2707 if (encoder) { 2708 ALOGW("AC3 encoding is not supported."); 2709 return INVALID_OPERATION; 2710 } 2711 2712 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2713 InitOMXParams(&def); 2714 def.nPortIndex = kPortIndexInput; 2715 2716 err = mOMX->getParameter( 2717 mNode, 2718 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2719 &def, 2720 sizeof(def)); 2721 2722 if (err != OK) { 2723 return err; 2724 } 2725 2726 def.nChannels = numChannels; 2727 def.nSampleRate = sampleRate; 2728 2729 return mOMX->setParameter( 2730 mNode, 2731 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2732 &def, 2733 sizeof(def)); 2734} 2735 2736status_t ACodec::setupEAC3Codec( 2737 bool encoder, int32_t numChannels, int32_t sampleRate) { 2738 status_t err = setupRawAudioFormat( 2739 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2740 2741 if (err != OK) { 2742 return err; 2743 } 2744 2745 if (encoder) { 2746 ALOGW("EAC3 encoding is not supported."); 2747 return INVALID_OPERATION; 2748 } 2749 2750 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2751 InitOMXParams(&def); 2752 def.nPortIndex = kPortIndexInput; 2753 2754 err = mOMX->getParameter( 2755 mNode, 2756 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2757 &def, 2758 sizeof(def)); 2759 2760 if (err != OK) { 2761 return err; 2762 } 2763 2764 def.nChannels = numChannels; 2765 def.nSampleRate = sampleRate; 2766 2767 return mOMX->setParameter( 2768 mNode, 2769 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2770 &def, 2771 sizeof(def)); 2772} 2773 2774static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2775 bool isAMRWB, int32_t bps) { 2776 if (isAMRWB) { 2777 if (bps <= 6600) { 2778 return OMX_AUDIO_AMRBandModeWB0; 2779 } else if (bps <= 8850) { 2780 return OMX_AUDIO_AMRBandModeWB1; 2781 } else if (bps <= 12650) { 2782 return OMX_AUDIO_AMRBandModeWB2; 2783 } else if (bps <= 14250) { 2784 return OMX_AUDIO_AMRBandModeWB3; 
2785 } else if (bps <= 15850) { 2786 return OMX_AUDIO_AMRBandModeWB4; 2787 } else if (bps <= 18250) { 2788 return OMX_AUDIO_AMRBandModeWB5; 2789 } else if (bps <= 19850) { 2790 return OMX_AUDIO_AMRBandModeWB6; 2791 } else if (bps <= 23050) { 2792 return OMX_AUDIO_AMRBandModeWB7; 2793 } 2794 2795 // 23850 bps 2796 return OMX_AUDIO_AMRBandModeWB8; 2797 } else { // AMRNB 2798 if (bps <= 4750) { 2799 return OMX_AUDIO_AMRBandModeNB0; 2800 } else if (bps <= 5150) { 2801 return OMX_AUDIO_AMRBandModeNB1; 2802 } else if (bps <= 5900) { 2803 return OMX_AUDIO_AMRBandModeNB2; 2804 } else if (bps <= 6700) { 2805 return OMX_AUDIO_AMRBandModeNB3; 2806 } else if (bps <= 7400) { 2807 return OMX_AUDIO_AMRBandModeNB4; 2808 } else if (bps <= 7950) { 2809 return OMX_AUDIO_AMRBandModeNB5; 2810 } else if (bps <= 10200) { 2811 return OMX_AUDIO_AMRBandModeNB6; 2812 } 2813 2814 // 12200 bps 2815 return OMX_AUDIO_AMRBandModeNB7; 2816 } 2817} 2818 2819status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2820 OMX_AUDIO_PARAM_AMRTYPE def; 2821 InitOMXParams(&def); 2822 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2823 2824 status_t err = 2825 mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2826 2827 if (err != OK) { 2828 return err; 2829 } 2830 2831 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2832 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2833 2834 err = mOMX->setParameter( 2835 mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2836 2837 if (err != OK) { 2838 return err; 2839 } 2840 2841 return setupRawAudioFormat( 2842 encoder ? kPortIndexInput : kPortIndexOutput, 2843 isWAMR ? 
16000 : 8000 /* sampleRate */, 2844 1 /* numChannels */); 2845} 2846 2847status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2848 if (encoder) { 2849 return INVALID_OPERATION; 2850 } 2851 2852 return setupRawAudioFormat( 2853 kPortIndexInput, sampleRate, numChannels); 2854} 2855 2856status_t ACodec::setupFlacCodec( 2857 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2858 2859 if (encoder) { 2860 OMX_AUDIO_PARAM_FLACTYPE def; 2861 InitOMXParams(&def); 2862 def.nPortIndex = kPortIndexOutput; 2863 2864 // configure compression level 2865 status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2866 if (err != OK) { 2867 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2868 return err; 2869 } 2870 def.nCompressionLevel = compressionLevel; 2871 err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2872 if (err != OK) { 2873 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2874 return err; 2875 } 2876 } 2877 2878 return setupRawAudioFormat( 2879 encoder ? 
kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures linear PCM on |portIndex| for the given sample rate, channel
// count and sample encoding (8-bit unsigned, 16-bit signed or 32-bit float).
// First forces the port definition to PCM coding, then fills in the PCM mode
// parameters. Falls back to 16-bit signed if the requested encoding is
// rejected; callers verify the result via readback.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // Switch the port to raw PCM coding before touching the PCM parameters.
    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    // Map the requested Android encoding onto OMX numerical-data/bit-depth.
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    // NOTE(review): OMX_ErrorNone == 0 == OK, so an unsupported channel
    // count silently skips the PCM parameter update and reports success —
    // presumably intentional best-effort behavior; confirm before changing.
    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err != OK && encoding != kAudioEncodingPcm16bit) {
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Enables tunneled (sideband) video playback: asks the component for a
// sideband handle tied to |audioHwSync| and attaches it to |nativeWindow|.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Selects a video port format by enumerating the component's supported
// (compression, color) pairs and committing the first match. A flexible
// color format requested by the caller is substituted with the codec's
// concrete equivalent. Enumeration is capped at kMaxIndicesToCheck.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && IsFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        // Workaround: the TI encoder reports inconsistent values in the
        // field not being matched, so only compare the relevant one per port.
        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
            && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    // Commit the matched format entry back to the component.
    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
//
// Suggested preference order:
// - optimal format for texture rendering (mediaplayer behavior)
// - optimal SW readable & texture renderable format (flex-YUV support)
// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
// - legacy "usable" standard formats
//
// For legacy support, we prefer a standard format, but will settle for a SW readable
// flex-YUV format.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            // output port of a decoder must enumerate raw (uncompressed) formats
            return OMX_ErrorBadParameter;
        }
        if (!getLegacyFlexibleFormat) {
            // take the component's first (most preferred) format as-is
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && IsFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}

// MIME type <-> OMX video coding type lookup table, used by the two
// translation helpers below.
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
    { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision },
};

// Maps a video MIME type (case-insensitive) to its OMX coding type.
// Returns ERROR_UNSUPPORTED and sets OMX_VIDEO_CodingUnused if unknown.
static status_t GetVideoCodingTypeFromMime(
        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
            return OK;
        }
    }

    *codingType = OMX_VIDEO_CodingUnused;

    return ERROR_UNSUPPORTED;
}

// Reverse of GetVideoCodingTypeFromMime: maps an OMX coding type to its
// MIME string. Clears |mime| and returns ERROR_UNSUPPORTED if unknown.
static status_t GetMimeTypeForVideoCoding(
        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
            *mime = kVideoCodingMapEntry[i].mMime;
            return OK;
        }
    }

    mime->clear();

    return ERROR_UNSUPPORTED;
}

// Requests |bufferNum| buffers on |portIndex|. Best-effort: a component may
// reject the new count, in which case a warning is logged and OK is still
// returned (the previous buffer count remains in effect).
status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;
    status_t err;
    ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }
    def.nBufferCountActual = bufferNum;
    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        // Component could reject this request.
        ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(),
                portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    }
    return OK;
}

// Configures a video decoder from |msg|: input compression format, output
// color format (falling back to the component's preferred/legacy formats),
// optional buffer counts, frame dimensions/rate, color aspects and HDR
// static info. Updates |outputFormat| with the negotiated values.
status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
        bool usingSwRenderer, sp<AMessage> &outputFormat) {
    int32_t width, height;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CODINGTYPE compressionFormat;
    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    if (compressionFormat == OMX_VIDEO_CodingVP9) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexInput;
        // Check if VP9 decoder advertises supported profiles.
        // (local |err| intentionally shadows the outer one; only the
        // success/failure of this probe matters here)
        params.nProfileIndex = 0;
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
                sizeof(params));
        mIsLegacyVP9Decoder = err != OK;
    }

    err = setVideoPortFormatType(
            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        return err;
    }

    int32_t tmp;
    if (msg->findInt32("color-format", &tmp)) {
        OMX_COLOR_FORMATTYPE colorFormat =
            static_cast<OMX_COLOR_FORMATTYPE>(tmp);
        err = setVideoPortFormatType(
                kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
        if (err != OK) {
            ALOGW("[%s] does not support color format %d",
                  mComponentName.c_str(), colorFormat);
            err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
        }
    } else {
        err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
    }

    if (err != OK) {
        return err;
    }

    // Set the component input buffer number to be |tmp|. If succeed,
    // component will set input port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-input-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexInput, tmp);
        if (err != OK)
            return err;
    }

    // Set the component output buffer number to be |tmp|. If succeed,
    // component will set output port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-output-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexOutput, tmp);
        if (err != OK)
            return err;
    }

    // Frame rate may arrive as float or int32; -1 means "unspecified".
    int32_t frameRateInt;
    float frameRateFloat;
    if (!msg->findFloat("frame-rate", &frameRateFloat)) {
        if (!msg->findInt32("frame-rate", &frameRateInt)) {
            frameRateInt = -1;
        }
        frameRateFloat = (float)frameRateInt;
    }

    err = setVideoFormatOnPort(
            kPortIndexInput, width, height, compressionFormat, frameRateFloat);

    if (err != OK) {
        return err;
    }

    err = setVideoFormatOnPort(
            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);

    if (err != OK) {
        return err;
    }

    // bitwise-or is deliberate here: both flags are plain bools
    err = setColorAspectsForVideoDecoder(
            width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }
    return err;
}

// Looks up the vendor extension index used to describe color aspects.
// On failure the cached index is cleared so later calls become no-ops.
status_t ACodec::initDescribeColorAspectsIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
    if (err != OK) {
        mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes |params| color aspects to the codec (if the extension is
// supported); optionally reads them back via getCodecColorAspects to
// verify what the codec actually accepted.
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));

    if (verify && err == OK) {
        err = getCodecColorAspects(params);
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
            "[%s] setting color aspects failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Decoder path: derives color aspects from the configure() format (filling
// in dataspace-based defaults when rendering to a native window) and
// communicates them to the codec.
status_t ACodec::setColorAspectsForVideoDecoder(
        int32_t width, int32_t height, bool usingNativeWindow,
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (usingNativeWindow) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
        // The default aspects will be set back to the output format during the
        // getFormat phase of configure(). Set non-Unspecified values back into the
        // format, in case component does not support this enumeration.
        setColorAspectsIntoFormat(params.sAspects, outputFormat);
    }

    (void)initDescribeColorAspectsIndex();

    // communicate color aspects to codec
    return setCodecColorAspects(params);
}

// Reads the codec's current color aspects into |params| (if the extension
// is supported). Warns only for plain queries; dataspace requests and
// dataspace-changed notifications may legitimately be unsupported.
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}

// Queries the encoder's input-port color aspects and, if supported,
// writes them into |format| (forcing even Unspecified values).
status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    status_t err = getCodecColorAspects(params);
    if (err == OK) {
        // we only set encoder input aspects if codec supports them
        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
    }
    return err;
}

// Determines the dataspace for the aspects in |params|: first asks the
// codec for guidance (when |tryCodec|), then falls back to the legacy
// mapping derived from the aspects themselves.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
          "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}


// Decoder path used at format-change time: re-negotiates color aspects
// with the codec (with readback) and optionally resolves the resulting
// dataspace. Always records the final aspects into |outputFormat|.
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}

// initial video encoder setup for bytebuffer mode
// Encoder path (bytebuffer mode): resolves color aspects from the
// configure() format, optionally derives a recorder dataspace, and
// negotiates the aspects with the codec without letting it override the
// platform-chosen values.
status_t ACodec::setColorAspectsForVideoEncoder(
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    // copy config to output format as this is not exposed via getFormat
    copyColorConfig(configFormat, outputFormat);

    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    getColorAspectsFromFormat(configFormat, params.sAspects);

    (void)initDescribeColorAspectsIndex();

    int32_t usingRecorder;
    if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
        android_dataspace dataSpace = HAL_DATASPACE_BT709;
        int32_t width, height;
        if (configFormat->findInt32("width", &width)
                && configFormat->findInt32("height", &height)) {
            setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
            status_t err = getDataSpace(
                    params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
            if (err != OK) {
                return err;
            }
            setColorAspectsIntoFormat(params.sAspects, outputFormat);
        }
        inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
    }

    // communicate color aspects to codec, but do not allow change of the platform aspects
    ColorAspects origAspects = params.sAspects;
    for (int triesLeft = 2; --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK
                || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
                        params.sAspects, origAspects, true /* usePlatformAspects */)) {
            return err;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }
    return OK;
}

// Communicates HDR static metadata (from the configure() format) to the
// codec on |portIndex|; mirrors the info into |outputFormat| when present.
status_t ACodec::setHDRStaticInfoForVideoCodec(
        OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    HDRStaticInfo *info = &params.sInfo;
    if (getHDRStaticInfoFromFormat(configFormat, info)) {
        setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
    }

    (void)initDescribeHDRStaticInfoIndex();

    // communicate HDR static Info to codec
    return setHDRStaticInfo(params);
}

// subsequent initial video encoder setup for surface mode
status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
        android_dataspace *dataSpace /* nonnull */) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    ColorAspects &aspects = params.sAspects;

    // reset default format and store resulting format into both input and output formats
    getColorAspectsFromFormat(mConfigFormat, aspects);
    int32_t width, height;
    if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) {
        setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
    }
    setColorAspectsIntoFormat(aspects, mInputFormat);
    setColorAspectsIntoFormat(aspects, mOutputFormat);

    // communicate color aspects to codec, but do not allow any change
    ColorAspects origAspects = aspects;
    status_t err = OK;
    for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) {
        // NOTE(review): this inner |err| shadows the outer one, so the outer
        // |err| remains OK regardless of setCodecColorAspects' result —
        // confirm whether the shadowing is intentional before changing.
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) {
            break;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }

    *dataSpace = HAL_DATASPACE_BT709;
    aspects = origAspects; // restore desired color aspects
    status_t res = getDataSpace(
            params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */);
    if (err == OK) {
        err = res;
    }
    mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
    mInputFormat->setBuffer(
            "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));

    // update input format with codec supported color aspects (basically set unsupported
    // aspects to Unspecified)
    if (err == OK) {
        (void)getInputColorAspectsForVideoEncoder(mInputFormat);
    }

    ALOGV("set default color aspects, updated input format to %s, output format to %s",
            mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());

    return err;
}

// Reads HDR static metadata back from the codec on |portIndex| and, if
// supported, writes it into |format|.
status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    status_t err = getHDRStaticInfo(params);
    if (err == OK) {
        // we only set decodec output HDRStaticInfo if codec supports them
        setHDRStaticInfoIntoFormat(params.sInfo, format);
    }
    return err;
}

// Looks up the vendor extension index for HDR static info. On failure the
// cached index is cleared so later calls become no-ops.
status_t ACodec::initDescribeHDRStaticInfoIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
    if (err != OK) {
        mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes HDR static metadata to the codec (if the extension is supported)
// and logs the Type-1 (SMPTE ST 2086 + CTA-861.3 style) fields.
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    const HDRStaticInfo *info = &params.sInfo;
    ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, "
            "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
            mComponentName.c_str(),
            info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y,
            info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
            info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
            info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] setting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Reads HDR static metadata from the codec into |params| (if supported).
status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] getting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Configures a video encoder from |msg|: input color format and geometry,
// output compression format and bitrate, then per-codec parameters,
// intra-refresh, color aspects, HDR static info and temporal layering.
// Optional features that the codec does not support are logged and skipped.
status_t ACodec::setupVideoEncoder(
        const char *mime, const sp<AMessage> &msg,
        sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    int32_t tmp;
    if (!msg->findInt32("color-format", &tmp)) {
        return INVALID_OPERATION;
    }

    OMX_COLOR_FORMATTYPE colorFormat =
        static_cast<OMX_COLOR_FORMATTYPE>(tmp);

    status_t err = setVideoPortFormatType(
            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);

    if (err != OK) {
        ALOGE("[%s] does not support color format %d",
              mComponentName.c_str(), colorFormat);

        return err;
    }

    /* Input port configuration */

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    int32_t width, height, bitrate;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)
            || !msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    int32_t stride;
    if (!msg->findInt32("stride", &stride)) {
        stride = width;
    }

    video_def->nStride = stride;

    int32_t sliceHeight;
    if (!msg->findInt32("slice-height", &sliceHeight)) {
        sliceHeight = height;
    }

    video_def->nSliceHeight = sliceHeight;

    // YUV420 input: 1.5 bytes per pixel of stride x slice-height.
    def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
        mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
    }

    // frame rate is expressed in OMX as Q16 fixed-point
    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
    // this is redundant as it was already set up in setVideoPortFormatType
    // FIXME for now skip this only for flexible YUV formats
    if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
        video_def->eColorFormat = colorFormat;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set input port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    /* Output port configuration */

    OMX_VIDEO_CODINGTYPE compressionFormat;
    err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        ALOGE("[%s] does not support compression format %d",
             mComponentName.c_str(), compressionFormat);

        return err;
    }

    def.nPortIndex = kPortIndexOutput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;
    video_def->xFramerate = 0;
    video_def->nBitrate = bitrate;
    video_def->eCompressionFormat = compressionFormat;
    video_def->eColorFormat = OMX_COLOR_FormatUnused;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set output port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    int32_t intraRefreshPeriod = 0;
    if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod >= 0) {
        err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    // Codec-specific encoder parameters.
    switch (compressionFormat) {
        case OMX_VIDEO_CodingMPEG4:
            err = setupMPEG4EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingH263:
            err = setupH263EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingAVC:
            err = setupAVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingHEVC:
            err = setupHEVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            err = setupVPXEncoderParameters(msg);
            break;

        default:
            break;
    }

    if (err != OK) {
        return err;
    }

    // Set up color aspects on input, but propagate them to the output format, as they will
    // not be read back from encoder.
    err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
    if (err == ERROR_UNSUPPORTED) {
        ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    switch (compressionFormat) {
        case OMX_VIDEO_CodingAVC:
        case OMX_VIDEO_CodingHEVC:
            err = configureTemporalLayers(msg, true /* inConfigure */, outputFormat);
            if (err != OK) {
                err = OK; // ignore failure
            }
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            // TODO: do we need to support android.generic layering? webrtc layering is
            // already set up in setupVPXEncoderParameters.
            break;

        default:
            break;
    }

    if (err == OK) {
        ALOGI("setupVideoEncoder succeeded");
    }

    return err;
}

// Configures cyclic/adaptive intra macroblock refresh on the output port
// from the "intra-refresh-*" keys in |msg|; |mode| selects the OMX refresh
// mode and determines which keys are required.
status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nCirMBs = mbs;
    }

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nAirMBs = mbs;

        int32_t ref;
        if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
            return INVALID_OPERATION;
        }
        params.nAirRef = ref;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoIntraRefresh,
            &params, sizeof(params));
    return err;
}

// Converts an I-frame interval (seconds) and frame rate into the number of
// P frames between I frames, accounting for B frames between I/P frames.
static OMX_U32 setPFramesSpacing(
        int32_t iFramesInterval /* seconds */, int32_t frameRate, uint32_t BFramesSpacing = 0) {
    // BFramesSpacing is the number of B frames between I/P frames
    // PFramesSpacing (the value to be returned) is the number of P frames between I frames
    //
    // keyFrameInterval = ((PFramesSpacing + 1) * BFramesSpacing) + PFramesSpacing + 1
    //                                     ^^^                                 ^^^        ^^^
    //                              number of B frames                 number of P    I frame
    //
    //                 = (PFramesSpacing + 1) * (BFramesSpacing + 1)
    //
    // E.g.
    //      I   P   I  : I-interval: 8, nPFrames 1, nBFrames 3
    //       BBB BBB
    //
    if (iFramesInterval < 0) { // just 1 key frame
        return 0xFFFFFFFE; // don't use maxint as key-frame-interval calculation will add 1
    } else if (iFramesInterval == 0) { // just key frames
        return 0;
    }

    // round down as key-frame-interval is an upper limit
    uint32_t keyFrameInterval = uint32_t(frameRate * iFramesInterval);
    OMX_U32 ret = keyFrameInterval / (BFramesSpacing + 1);
    return ret > 0 ? ret - 1 : 0;
}

// Returns the requested bitrate control mode, defaulting to VBR when the
// "bitrate-mode" key is absent.
static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
    int32_t tmp;
    if (!msg->findInt32("bitrate-mode", &tmp)) {
        return OMX_Video_ControlRateVariable;
    }

    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
}

// Configures MPEG-4 encoder parameters (GOP structure, profile/level,
// bitrate and error correction) from |msg|.
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nBFrames = 0;
    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, mpeg4type.nBFrames);
    if (mpeg4type.nPFrames == 0) {
        // all-key-frame stream: restrict to I pictures only
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// Configures H.263 encoder parameters (GOP structure, profile/level) from
// |msg|. (Definition continues past the end of this chunk.)
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nBFrames = 0;
    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h263type.nBFrames);
    if (h263type.nPFrames == 0) {
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if
(err != OK) { 4046 return err; 4047 } 4048 4049 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 4050 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 4051 } 4052 4053 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 4054 h263type.bForceRoundingTypeToZero = OMX_FALSE; 4055 h263type.nPictureHeaderRepetition = 0; 4056 h263type.nGOBHeaderInterval = 0; 4057 4058 err = mOMX->setParameter( 4059 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 4060 4061 if (err != OK) { 4062 return err; 4063 } 4064 4065 err = configureBitrate(bitrate, bitrateMode); 4066 4067 if (err != OK) { 4068 return err; 4069 } 4070 4071 return setupErrorCorrectionParameters(); 4072} 4073 4074// static 4075int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor( 4076 int width, int height, int rate, int bitrate, 4077 OMX_VIDEO_AVCPROFILETYPE profile) { 4078 // convert bitrate to main/baseline profile kbps equivalent 4079 switch (profile) { 4080 case OMX_VIDEO_AVCProfileHigh10: 4081 bitrate = divUp(bitrate, 3000); break; 4082 case OMX_VIDEO_AVCProfileHigh: 4083 bitrate = divUp(bitrate, 1250); break; 4084 default: 4085 bitrate = divUp(bitrate, 1000); break; 4086 } 4087 4088 // convert size and rate to MBs 4089 width = divUp(width, 16); 4090 height = divUp(height, 16); 4091 int mbs = width * height; 4092 rate *= mbs; 4093 int maxDimension = max(width, height); 4094 4095 static const int limits[][5] = { 4096 /* MBps MB dim bitrate level */ 4097 { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 }, 4098 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b }, 4099 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 }, 4100 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 }, 4101 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 }, 4102 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 }, 4103 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 }, 4104 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 }, 4105 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 }, 4106 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 4107 { 216000, 
5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 4108 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 4109 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 4110 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 4111 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 4112 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 4113 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 4114 }; 4115 4116 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 4117 const int (&limit)[5] = limits[i]; 4118 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 4119 && bitrate <= limit[3]) { 4120 return limit[4]; 4121 } 4122 } 4123 return 0; 4124} 4125 4126status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 4127 int32_t bitrate, iFrameInterval; 4128 if (!msg->findInt32("bitrate", &bitrate) 4129 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4130 return INVALID_OPERATION; 4131 } 4132 4133 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4134 4135 float frameRate; 4136 if (!msg->findFloat("frame-rate", &frameRate)) { 4137 int32_t tmp; 4138 if (!msg->findInt32("frame-rate", &tmp)) { 4139 return INVALID_OPERATION; 4140 } 4141 frameRate = (float)tmp; 4142 } 4143 4144 status_t err = OK; 4145 int32_t intraRefreshMode = 0; 4146 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 4147 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 4148 if (err != OK) { 4149 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4150 err, intraRefreshMode); 4151 return err; 4152 } 4153 } 4154 4155 OMX_VIDEO_PARAM_AVCTYPE h264type; 4156 InitOMXParams(&h264type); 4157 h264type.nPortIndex = kPortIndexOutput; 4158 4159 err = mOMX->getParameter( 4160 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4161 4162 if (err != OK) { 4163 return err; 4164 } 4165 4166 h264type.nAllowedPictureTypes = 4167 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4168 4169 int32_t profile; 4170 if (msg->findInt32("profile", 
&profile)) { 4171 int32_t level; 4172 if (!msg->findInt32("level", &level)) { 4173 return INVALID_OPERATION; 4174 } 4175 4176 err = verifySupportForProfileAndLevel(profile, level); 4177 4178 if (err != OK) { 4179 return err; 4180 } 4181 4182 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4183 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4184 } else { 4185 // Use baseline profile for AVC recording if profile is not specified. 4186 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4187 } 4188 4189 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4190 asString(h264type.eProfile), asString(h264type.eLevel)); 4191 4192 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4193 h264type.nSliceHeaderSpacing = 0; 4194 h264type.bUseHadamard = OMX_TRUE; 4195 h264type.nRefFrames = 1; 4196 h264type.nBFrames = 0; 4197 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4198 if (h264type.nPFrames == 0) { 4199 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4200 } 4201 h264type.nRefIdx10ActiveMinus1 = 0; 4202 h264type.nRefIdx11ActiveMinus1 = 0; 4203 h264type.bEntropyCodingCABAC = OMX_FALSE; 4204 h264type.bWeightedPPrediction = OMX_FALSE; 4205 h264type.bconstIpred = OMX_FALSE; 4206 h264type.bDirect8x8Inference = OMX_FALSE; 4207 h264type.bDirectSpatialTemporal = OMX_FALSE; 4208 h264type.nCabacInitIdc = 0; 4209 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4210 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4211 h264type.nSliceHeaderSpacing = 0; 4212 h264type.bUseHadamard = OMX_TRUE; 4213 h264type.nRefFrames = 2; 4214 h264type.nBFrames = 1; 4215 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4216 h264type.nAllowedPictureTypes = 4217 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4218 h264type.nRefIdx10ActiveMinus1 = 0; 4219 h264type.nRefIdx11ActiveMinus1 = 0; 4220 h264type.bEntropyCodingCABAC = OMX_TRUE; 
4221 h264type.bWeightedPPrediction = OMX_TRUE; 4222 h264type.bconstIpred = OMX_TRUE; 4223 h264type.bDirect8x8Inference = OMX_TRUE; 4224 h264type.bDirectSpatialTemporal = OMX_TRUE; 4225 h264type.nCabacInitIdc = 1; 4226 } 4227 4228 if (h264type.nBFrames != 0) { 4229 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4230 } 4231 4232 h264type.bEnableUEP = OMX_FALSE; 4233 h264type.bEnableFMO = OMX_FALSE; 4234 h264type.bEnableASO = OMX_FALSE; 4235 h264type.bEnableRS = OMX_FALSE; 4236 h264type.bFrameMBsOnly = OMX_TRUE; 4237 h264type.bMBAFF = OMX_FALSE; 4238 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4239 4240 err = mOMX->setParameter( 4241 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4242 4243 if (err != OK) { 4244 return err; 4245 } 4246 4247 return configureBitrate(bitrate, bitrateMode); 4248} 4249 4250status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4251 int32_t bitrate, iFrameInterval; 4252 if (!msg->findInt32("bitrate", &bitrate) 4253 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4254 return INVALID_OPERATION; 4255 } 4256 4257 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4258 4259 float frameRate; 4260 if (!msg->findFloat("frame-rate", &frameRate)) { 4261 int32_t tmp; 4262 if (!msg->findInt32("frame-rate", &tmp)) { 4263 return INVALID_OPERATION; 4264 } 4265 frameRate = (float)tmp; 4266 } 4267 4268 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4269 InitOMXParams(&hevcType); 4270 hevcType.nPortIndex = kPortIndexOutput; 4271 4272 status_t err = OK; 4273 err = mOMX->getParameter( 4274 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4275 if (err != OK) { 4276 return err; 4277 } 4278 4279 int32_t profile; 4280 if (msg->findInt32("profile", &profile)) { 4281 int32_t level; 4282 if (!msg->findInt32("level", &level)) { 4283 return INVALID_OPERATION; 4284 } 4285 4286 err = verifySupportForProfileAndLevel(profile, level); 4287 if (err != OK) { 4288 return err; 4289 } 4290 
4291 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4292 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4293 } 4294 // TODO: finer control? 4295 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1; 4296 4297 err = mOMX->setParameter( 4298 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4299 if (err != OK) { 4300 return err; 4301 } 4302 4303 return configureBitrate(bitrate, bitrateMode); 4304} 4305 4306status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 4307 int32_t bitrate; 4308 int32_t iFrameInterval = 0; 4309 size_t tsLayers = 0; 4310 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4311 OMX_VIDEO_VPXTemporalLayerPatternNone; 4312 static const uint32_t kVp8LayerRateAlloction 4313 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4314 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4315 {100, 100, 100}, // 1 layer 4316 { 60, 100, 100}, // 2 layers {60%, 40%} 4317 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4318 }; 4319 if (!msg->findInt32("bitrate", &bitrate)) { 4320 return INVALID_OPERATION; 4321 } 4322 msg->findInt32("i-frame-interval", &iFrameInterval); 4323 4324 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4325 4326 float frameRate; 4327 if (!msg->findFloat("frame-rate", &frameRate)) { 4328 int32_t tmp; 4329 if (!msg->findInt32("frame-rate", &tmp)) { 4330 return INVALID_OPERATION; 4331 } 4332 frameRate = (float)tmp; 4333 } 4334 4335 AString tsSchema; 4336 if (msg->findString("ts-schema", &tsSchema)) { 4337 unsigned int numLayers = 0; 4338 unsigned int numBLayers = 0; 4339 int tags; 4340 char dummy; 4341 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1 4342 && numLayers > 0) { 4343 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4344 tsLayers = numLayers; 4345 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c", 4346 &numLayers, &dummy, &numBLayers, &dummy)) 4347 && (tags == 1 || (tags == 3 
&& dummy == '+')) 4348 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 4349 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4350 // VPX does not have a concept of B-frames, so just count all layers 4351 tsLayers = numLayers + numBLayers; 4352 } else { 4353 ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 4354 } 4355 tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS); 4356 } 4357 4358 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4359 InitOMXParams(&vp8type); 4360 vp8type.nPortIndex = kPortIndexOutput; 4361 status_t err = mOMX->getParameter( 4362 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4363 &vp8type, sizeof(vp8type)); 4364 4365 if (err == OK) { 4366 if (iFrameInterval > 0) { 4367 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1; 4368 } 4369 vp8type.eTemporalPattern = pattern; 4370 vp8type.nTemporalLayerCount = tsLayers; 4371 if (tsLayers > 0) { 4372 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4373 vp8type.nTemporalLayerBitrateRatio[i] = 4374 kVp8LayerRateAlloction[tsLayers - 1][i]; 4375 } 4376 } 4377 if (bitrateMode == OMX_Video_ControlRateConstant) { 4378 vp8type.nMinQuantizer = 2; 4379 vp8type.nMaxQuantizer = 63; 4380 } 4381 4382 err = mOMX->setParameter( 4383 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4384 &vp8type, sizeof(vp8type)); 4385 if (err != OK) { 4386 ALOGW("Extended VP8 parameters set failed: %d", err); 4387 } 4388 } 4389 4390 return configureBitrate(bitrate, bitrateMode); 4391} 4392 4393status_t ACodec::verifySupportForProfileAndLevel( 4394 int32_t profile, int32_t level) { 4395 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4396 InitOMXParams(¶ms); 4397 params.nPortIndex = kPortIndexOutput; 4398 4399 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4400 params.nProfileIndex = index; 4401 status_t err = mOMX->getParameter( 4402 mNode, 4403 OMX_IndexParamVideoProfileLevelQuerySupported, 4404 ¶ms, 4405 
sizeof(params)); 4406 4407 if (err != OK) { 4408 return err; 4409 } 4410 4411 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4412 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4413 4414 if (profile == supportedProfile && level <= supportedLevel) { 4415 return OK; 4416 } 4417 4418 if (index == kMaxIndicesToCheck) { 4419 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4420 mComponentName.c_str(), index, 4421 params.eProfile, params.eLevel); 4422 } 4423 } 4424 return ERROR_UNSUPPORTED; 4425} 4426 4427status_t ACodec::configureBitrate( 4428 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 4429 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 4430 InitOMXParams(&bitrateType); 4431 bitrateType.nPortIndex = kPortIndexOutput; 4432 4433 status_t err = mOMX->getParameter( 4434 mNode, OMX_IndexParamVideoBitrate, 4435 &bitrateType, sizeof(bitrateType)); 4436 4437 if (err != OK) { 4438 return err; 4439 } 4440 4441 bitrateType.eControlRate = bitrateMode; 4442 bitrateType.nTargetBitrate = bitrate; 4443 4444 return mOMX->setParameter( 4445 mNode, OMX_IndexParamVideoBitrate, 4446 &bitrateType, sizeof(bitrateType)); 4447} 4448 4449status_t ACodec::setupErrorCorrectionParameters() { 4450 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 4451 InitOMXParams(&errorCorrectionType); 4452 errorCorrectionType.nPortIndex = kPortIndexOutput; 4453 4454 status_t err = mOMX->getParameter( 4455 mNode, OMX_IndexParamVideoErrorCorrection, 4456 &errorCorrectionType, sizeof(errorCorrectionType)); 4457 4458 if (err != OK) { 4459 return OK; // Optional feature. 
Ignore this failure 4460 } 4461 4462 errorCorrectionType.bEnableHEC = OMX_FALSE; 4463 errorCorrectionType.bEnableResync = OMX_TRUE; 4464 errorCorrectionType.nResynchMarkerSpacing = 256; 4465 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 4466 errorCorrectionType.bEnableRVLC = OMX_FALSE; 4467 4468 return mOMX->setParameter( 4469 mNode, OMX_IndexParamVideoErrorCorrection, 4470 &errorCorrectionType, sizeof(errorCorrectionType)); 4471} 4472 4473status_t ACodec::setVideoFormatOnPort( 4474 OMX_U32 portIndex, 4475 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 4476 float frameRate) { 4477 OMX_PARAM_PORTDEFINITIONTYPE def; 4478 InitOMXParams(&def); 4479 def.nPortIndex = portIndex; 4480 4481 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 4482 4483 status_t err = mOMX->getParameter( 4484 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4485 if (err != OK) { 4486 return err; 4487 } 4488 4489 if (portIndex == kPortIndexInput) { 4490 // XXX Need a (much) better heuristic to compute input buffer sizes. 
4491 const size_t X = 64 * 1024; 4492 if (def.nBufferSize < X) { 4493 def.nBufferSize = X; 4494 } 4495 } 4496 4497 if (def.eDomain != OMX_PortDomainVideo) { 4498 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); 4499 return FAILED_TRANSACTION; 4500 } 4501 4502 video_def->nFrameWidth = width; 4503 video_def->nFrameHeight = height; 4504 4505 if (portIndex == kPortIndexInput) { 4506 video_def->eCompressionFormat = compressionFormat; 4507 video_def->eColorFormat = OMX_COLOR_FormatUnused; 4508 if (frameRate >= 0) { 4509 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 4510 } 4511 } 4512 4513 err = mOMX->setParameter( 4514 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4515 4516 return err; 4517} 4518 4519status_t ACodec::initNativeWindow() { 4520 if (mNativeWindow != NULL) { 4521 return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE); 4522 } 4523 4524 mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 4525 return OK; 4526} 4527 4528size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 4529 size_t n = 0; 4530 4531 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4532 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 4533 4534 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4535 ++n; 4536 } 4537 } 4538 4539 return n; 4540} 4541 4542size_t ACodec::countBuffersOwnedByNativeWindow() const { 4543 size_t n = 0; 4544 4545 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 4546 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 4547 4548 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4549 ++n; 4550 } 4551 } 4552 4553 return n; 4554} 4555 4556void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 4557 if (mNativeWindow == NULL) { 4558 return; 4559 } 4560 4561 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 4562 && dequeueBufferFromNativeWindow() != NULL) { 
4563 // these buffers will be submitted as regular buffers; account for this 4564 if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) { 4565 --mMetadataBuffersToSubmit; 4566 } 4567 } 4568} 4569 4570bool ACodec::allYourBuffersAreBelongToUs( 4571 OMX_U32 portIndex) { 4572 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4573 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 4574 4575 if (info->mStatus != BufferInfo::OWNED_BY_US 4576 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4577 ALOGV("[%s] Buffer %u on port %u still has status %d", 4578 mComponentName.c_str(), 4579 info->mBufferID, portIndex, info->mStatus); 4580 return false; 4581 } 4582 } 4583 4584 return true; 4585} 4586 4587bool ACodec::allYourBuffersAreBelongToUs() { 4588 return allYourBuffersAreBelongToUs(kPortIndexInput) 4589 && allYourBuffersAreBelongToUs(kPortIndexOutput); 4590} 4591 4592void ACodec::deferMessage(const sp<AMessage> &msg) { 4593 mDeferredQueue.push_back(msg); 4594} 4595 4596void ACodec::processDeferredMessages() { 4597 List<sp<AMessage> > queue = mDeferredQueue; 4598 mDeferredQueue.clear(); 4599 4600 List<sp<AMessage> >::iterator it = queue.begin(); 4601 while (it != queue.end()) { 4602 onMessageReceived(*it++); 4603 } 4604} 4605 4606status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4607 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4608 OMX_PARAM_PORTDEFINITIONTYPE def; 4609 InitOMXParams(&def); 4610 def.nPortIndex = portIndex; 4611 4612 status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4613 if (err != OK) { 4614 return err; 4615 } 4616 4617 if (def.eDir != (portIndex == kPortIndexOutput ? 
            OMX_DirOutput : OMX_DirInput)) {
        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
        return BAD_VALUE;
    }

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // Raw video: decoder output port or encoder input port.
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    if (mNativeWindow == NULL) {
                        // Describe the flexible image layout so clients can
                        // access the raw frames directly.
                        DescribeColorFormat2Params describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (DescribeColorFormat(mOMX, mNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        if (mOMX->getConfig(
                                    mNode,
                                    (portIndex == kPortIndexOutput ?
                                            OMX_IndexConfigCommonOutputCrop :
                                            OMX_IndexConfigCommonInputCrop),
                                    &rect, sizeof(rect)) != OK) {
                            // No crop info from the component; use the full
                            // frame.
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        if (rect.nLeft < 0 ||
                            rect.nTop < 0 ||
                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
                                    rect.nLeft, rect.nTop,
                                    rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                        (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
                    } else {
                        (void)getInputColorAspectsForVideoEncoder(notify);
                        if (mConfigFormat->contains("hdr-static-info")) {
                            (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
                        }
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    // Report the configured temporal-layering schema, if any.
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        AString tsSchema = "none";
                        if (vp8type.eTemporalPattern
                                == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
                            switch (vp8type.nTemporalLayerCount) {
                                case 1:
                                {
                                    tsSchema = "webrtc.vp8.1-layer";
                                    break;
                                }
                                case 2:
                                {
                                    tsSchema = "webrtc.vp8.2-layer";
                                    break;
                                }
                                case 3:
                                {
                                    tsSchema = "webrtc.vp8.3-layer";
                                    break;
                                }
                                default:
                                {
                                    break;
                                }
                            }
                        }
                        notify->setString("ts-schema", tsSchema);
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
                        // should be CodingUnused
                        ALOGE("Raw port video compression format is %s(%d)",
                                asString(videoDef->eCompressionFormat),
                                videoDef->eCompressionFormat);
                        return BAD_VALUE;
                    }
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                            videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    uint32_t intraRefreshPeriod = 0;
                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
                            && intraRefreshPeriod > 0) {
                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // Only interleaved (or mono) linear PCM is supported.
                    if (params.nChannels <= 0
                            || (params.nChannels != 1 && !params.bInterleaved)
                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
                        ALOGE("unsupported PCM port: %u channels%s, %u-bit",
                                params.nChannels,
                                params.bInterleaved ? " interleaved" : "",
                                params.nBitPerSample);
                        return FAILED_TRANSACTION;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);

                    // Map numerical-data/bit-depth to an AudioEncoding;
                    // default is signed 16-bit.
                    AudioEncoding encoding = kAudioEncodingPcm16bit;
                    if (params.eNumData == OMX_NumericalDataUnsigned
                            && params.nBitPerSample == 8u) {
                        encoding = kAudioEncodingPcm8bit;
                    } else if (params.eNumData == OMX_NumericalDataFloat
                            && params.nBitPerSample == 32u) {
                        encoding = kAudioEncodingPcmFloat;
                    } else if (params.nBitPerSample != 16u
                            || params.eNumData != OMX_NumericalDataSigned) {
                        ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ",
                                asString(params.eNumData), params.eNumData,
                                asString(params.ePCMMode), params.ePCMMode);
                        return FAILED_TRANSACTION;
                    }
                    notify->setInt32("pcm-encoding", encoding);

                    if (mChannelMaskPresent) {
                        notify->setInt32("channel-mask", mChannelMask);
                    }
                    break;
                }

                case OMX_AUDIO_CodingAAC:
                {
                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAMR:
                {
                    OMX_AUDIO_PARAM_AMRTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setInt32("channel-count", 1);
                    // Wide-band modes start at AMRBandModeWB0.
                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
                        notify->setInt32("sample-rate", 16000);
                    } else {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
                        notify->setInt32("sample-rate", 8000);
                    }
                    break;
                }

                case OMX_AUDIO_CodingFLAC:
                {
                    OMX_AUDIO_PARAM_FLACTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingMP3:
                {
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingVORBIS:
                {
                    OMX_AUDIO_PARAM_VORBISTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidEAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidOPUS:
                {
                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingG711:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // Distinguish mu-law/A-law/linear by PCM mode.
                    const char *mime = NULL;
                    if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
                    } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
                    } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
                        mime = MEDIA_MIMETYPE_AUDIO_RAW;
                    }
                    notify->setString("mime", mime);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate",
params.nSamplingRate); 5026 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5027 break; 5028 } 5029 5030 case OMX_AUDIO_CodingGSMFR: 5031 { 5032 OMX_AUDIO_PARAM_PCMMODETYPE params; 5033 InitOMXParams(¶ms); 5034 params.nPortIndex = portIndex; 5035 5036 err = mOMX->getParameter( 5037 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5038 if (err != OK) { 5039 return err; 5040 } 5041 5042 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5043 notify->setInt32("channel-count", params.nChannels); 5044 notify->setInt32("sample-rate", params.nSamplingRate); 5045 break; 5046 } 5047 5048 default: 5049 ALOGE("Unsupported audio coding: %s(%d)\n", 5050 asString(audioDef->eEncoding), audioDef->eEncoding); 5051 return BAD_TYPE; 5052 } 5053 break; 5054 } 5055 5056 default: 5057 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5058 return BAD_TYPE; 5059 } 5060 5061 return OK; 5062} 5063 5064void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5065 // aspects are normally communicated in ColorAspects 5066 int32_t range, standard, transfer; 5067 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5068 5069 // if some aspects are unspecified, use dataspace fields 5070 if (range != 0) { 5071 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5072 } 5073 if (standard != 0) { 5074 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5075 } 5076 if (transfer != 0) { 5077 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5078 } 5079 5080 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5081 if (range != 0) { 5082 mOutputFormat->setInt32("color-range", range); 5083 } 5084 if (standard != 0) { 5085 mOutputFormat->setInt32("color-standard", standard); 5086 } 5087 if (transfer != 0) { 5088 mOutputFormat->setInt32("color-transfer", transfer); 5089 } 5090 5091 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5092 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5093 dataSpace, 5094 aspects.mRange, asString(aspects.mRange), 5095 aspects.mPrimaries, asString(aspects.mPrimaries), 5096 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5097 aspects.mTransfer, asString(aspects.mTransfer), 5098 range, asString((ColorRange)range), 5099 standard, asString((ColorStandard)standard), 5100 transfer, asString((ColorTransfer)transfer)); 5101} 5102 5103void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5104 // store new output format, at the same time mark that this is no longer the first frame 5105 mOutputFormat = mBaseOutputFormat->dup(); 5106 5107 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5108 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5109 return; 5110 } 5111 5112 if (expectedFormat != NULL) { 5113 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5114 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5115 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5116 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5117 mComponentName.c_str(), 5118 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5119 } 5120 } 5121 5122 if (!mIsVideo && !mIsEncoder) { 5123 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5124 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5125 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5126 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5127 5128 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5129 if (mConverter[kPortIndexOutput] != NULL) { 5130 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5131 } 5132 } 5133 5134 if (mTunneled) { 5135 sendFormatChange(); 5136 } 5137} 5138 5139void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
        // notify renderer of the crop change and dataspace change
        // NOTE: native window uses extended right-bottom coordinate
        int32_t left, top, right, bottom;
        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
            notify->setRect("crop", left, top, right + 1, bottom + 1);
        }

        int32_t dataSpace;
        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
            notify->setInt32("dataspace", dataSpace);
        }
    }
}

// Posts a kWhatOutputFormatChanged notification carrying mOutputFormat.
// For raw audio with encoder delay/padding configured, first rescales the
// delay/padding frame counts to the new sample rate and rebuilds
// mSkipCutBuffer accordingly.
void ACodec::sendFormatChange() {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) {
        int32_t channelCount, sampleRate;
        CHECK(mOutputFormat->findInt32("channel-count", &channelCount));
        CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate));
        if (mSampleRate != 0 && sampleRate != 0) {
            // scale delay/padding (in frames) from the old rate to the new one
            mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate;
            mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate;
            mSampleRate = sampleRate;
        }
        if (mSkipCutBuffer != NULL) {
            size_t prevbufsize = mSkipCutBuffer->size();
            if (prevbufsize != 0) {
                // replacing a non-empty SkipCutBuffer discards its samples
                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
            }
        }
        mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatOutputFormatChanged);
    notify->setMessage("format", mOutputFormat);
    notify->post();

    // mLastOutputFormat is not used when tunneled; doing this just to stay consistent
    mLastOutputFormat = mOutputFormat;
}

// Reports a fatal error (CodecBase::kWhatError) to the client. When
// |internalError| is UNKNOWN_ERROR, attempts to derive a more specific status
// from the OMX error code. Sets mFatalError as a side effect.
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatError);
    ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);

    if (internalError == UNKNOWN_ERROR) { // find better error code
        const status_t omxStatus = statusFromOMXError(error);
        if (omxStatus != 0) {
            internalError = omxStatus;
        } else {
            ALOGW("Invalid OMX error %#x", error);
        }
    }

    mFatalError = true;

    notify->setInt32("err", internalError);
    notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
    notify->post();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::PortDescription::PortDescription() {
}

// Asks an encoder for a sync frame via the standard OMX intra-VOP-refresh
// config; returns ERROR_UNSUPPORTED for decoders.
status_t ACodec::requestIDRFrame() {
    if (!mIsEncoder) {
        return ERROR_UNSUPPORTED;
    }

    OMX_CONFIG_INTRAREFRESHVOPTYPE params;
    InitOMXParams(&params);

    params.nPortIndex = kPortIndexOutput;
    params.IntraRefreshVOP = OMX_TRUE;

    return mOMX->setConfig(
            mNode,
            OMX_IndexConfigVideoIntraVOPRefresh,
            &params,
            sizeof(params));
}

// Records one buffer's id/data/handle/backing-memory ref; the four vectors
// below stay index-aligned.
void ACodec::PortDescription::addBuffer(
        IOMX::buffer_id id, const sp<ABuffer> &buffer,
        const sp<NativeHandle> &handle, const sp<RefBase> &memRef) {
    mBufferIDs.push_back(id);
    mBuffers.push_back(buffer);
    mHandles.push_back(handle);
    mMemRefs.push_back(memRef);
}

size_t ACodec::PortDescription::countBuffers() {
    return mBufferIDs.size();
}

IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
    return mBufferIDs.itemAt(index);
}

sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
    return mBuffers.itemAt(index);
}

sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const {
    return mHandles.itemAt(index);
}

sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const {
    return mMemRefs.itemAt(index);
}

////////////////////////////////////////////////////////////////////////////////

ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
    : AState(parentState),
      mCodec(codec) {
}

// Default port mode; derived states override this to resubmit or free buffers.
ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
        OMX_U32 /* portIndex */) {
    return KEEP_BUFFERS;
}

// Common message dispatch shared by all codec states. Returns false for
// messages this base state does not handle, letting the concrete state try.
bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatInputBufferFilled:
        {
            onInputBufferFilled(msg);
            break;
        }

        case kWhatOutputBufferDrained:
        {
            onOutputBufferDrained(msg);
            break;
        }

        case ACodec::kWhatOMXMessageList:
        {
            // drop (return true) if the message targets a stale OMX node
            return checkOMXMessage(msg) ? onOMXMessageList(msg) : true;
        }

        case ACodec::kWhatOMXMessageItem:
        {
            // no need to check as we already did it for kWhatOMXMessageList
            return onOMXMessage(msg);
        }

        case ACodec::kWhatOMXMessage:
        {
            return checkOMXMessage(msg) ? onOMXMessage(msg) : true;
        }

        case ACodec::kWhatSetSurface:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            sp<RefBase> obj;
            CHECK(msg->findObject("surface", &obj));

            status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        case ACodec::kWhatSetInputSurface:
        case ACodec::kWhatSignalEndOfInputStream:
        {
            // This may result in an app illegal state exception.
5325 ALOGE("Message 0x%x was not handled", msg->what()); 5326 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5327 return true; 5328 } 5329 5330 case ACodec::kWhatOMXDied: 5331 { 5332 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5333 ALOGE("OMX/mediaserver died, signalling error!"); 5334 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5335 break; 5336 } 5337 5338 case ACodec::kWhatReleaseCodecInstance: 5339 { 5340 ALOGI("[%s] forcing the release of codec", 5341 mCodec->mComponentName.c_str()); 5342 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5343 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5344 mCodec->mComponentName.c_str(), err); 5345 sp<AMessage> notify = mCodec->mNotify->dup(); 5346 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5347 notify->post(); 5348 break; 5349 } 5350 5351 default: 5352 return false; 5353 } 5354 5355 return true; 5356} 5357 5358bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5359 // there is a possibility that this is an outstanding message for a 5360 // codec that we have already destroyed 5361 if (mCodec->mNode == 0) { 5362 ALOGI("ignoring message as already freed component: %s", 5363 msg->debugString().c_str()); 5364 return false; 5365 } 5366 5367 IOMX::node_id nodeID; 5368 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5369 if (nodeID != mCodec->mNode) { 5370 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5371 return false; 5372 } 5373 return true; 5374} 5375 5376bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5377 sp<RefBase> obj; 5378 CHECK(msg->findObject("messages", &obj)); 5379 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5380 5381 bool receivedRenderedEvents = false; 5382 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5383 it != msgList->getList().cend(); ++it) { 5384 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 
        mCodec->handleMessage(*it);
        int32_t type;
        CHECK((*it)->findInt32("type", &type));
        if (type == omx_message::FRAME_RENDERED) {
            receivedRenderedEvents = true;
        }
    }

    if (receivedRenderedEvents) {
        // NOTE: all buffers are rendered in this case
        mCodec->notifyOfRenderedFrames();
    }
    return true;
}

// Dispatches one OMX callback message (event / empty-buffer-done /
// fill-buffer-done / frame-rendered) to the matching handler.
bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));

    switch (type) {
        case omx_message::EVENT:
        {
            int32_t event, data1, data2;
            CHECK(msg->findInt32("event", &event));
            CHECK(msg->findInt32("data1", &data1));
            CHECK(msg->findInt32("data2", &data2));

            if (event == OMX_EventCmdComplete
                    && data1 == OMX_CommandFlush
                    && data2 == (int32_t)OMX_ALL) {
                // Use of this notification is not consistent across
                // implementations. We'll drop this notification and rely
                // on flush-complete notifications on the individual port
                // indices instead.
                return true;
            }

            return onOMXEvent(
                    static_cast<OMX_EVENTTYPE>(event),
                    static_cast<OMX_U32>(data1),
                    static_cast<OMX_U32>(data2));
        }

        case omx_message::EMPTY_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            int32_t fenceFd;

            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXEmptyBufferDone(bufferID, fenceFd);
        }

        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));

            int32_t rangeOffset, rangeLength, flags, fenceFd;
            int64_t timeUs;

            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    fenceFd);
        }

        case omx_message::FRAME_RENDERED:
        {
            int64_t mediaTimeUs, systemNano;

            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
            CHECK(msg->findInt64("system_nano", &systemNano));

            return onOMXFrameRendered(
                    mediaTimeUs, systemNano);
        }

        default:
            ALOGE("Unexpected message type: %d", type);
            return false;
    }
}

bool ACodec::BaseState::onOMXFrameRendered(
        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
    // ignore outside of Executing and PortSettingsChanged states
    return true;
}

// Handles generic OMX events: converts dataspace-change events into
// onDataSpaceChanged(), converts component errors into signalError(), and
// returns false for everything else so the concrete state can handle it.
bool ACodec::BaseState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    if (event == OMX_EventDataSpaceChanged) {
        // data2 packs the color aspects: range(31:24), primaries(23:16),
        // matrix coeffs(15:8), transfer(7:0); data1 is the dataspace itself
        ColorAspects aspects;
        aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
        aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF);
        aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF);
        aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF);

        mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
        return true;
    }

    if (event != OMX_EventError) {
        ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
             mCodec->mComponentName.c_str(), event, data1, data2);

        return false;
    }

    ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);

    // verify OMX component sends back an error we expect.
    OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
    if (!isOMXError(omxError)) {
        ALOGW("Invalid OMX error %#x", omxError);
        omxError = OMX_ErrorUndefined;
    }
    mCodec->signalError(omxError);

    return true;
}

// The component returned an input buffer: reclaim ownership and, in
// RESUBMIT mode, immediately offer it back to the client for refilling.
bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
    ALOGV("[%s] onOMXEmptyBufferDone %u",
         mCodec->mComponentName.c_str(), bufferID);

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        if (fenceFd >= 0) {
            // don't leak the fence fd on the error path
            ::close(fenceFd);
        }
        return false;
    }
    info->mStatus = BufferInfo::OWNED_BY_US;

    // input buffers cannot take fences, so wait for any fence now
    (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
    fenceFd = -1;

    // still save fence for completeness
    info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");

    // We're in "store-metadata-in-buffers" mode, the underlying
    // OMX component had access to data that's implicitly refcounted
    // by this "MediaBuffer" object. Now that the OMX component has
    // told us that it's done with the input buffer, we can decrement
    // the mediaBuffer's reference count.
    info->mData->setMediaBufferBase(NULL);

    PortMode mode = getPortMode(kPortIndexInput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
            postFillThisBuffer(info);
            break;

        case FREE_BUFFERS:
        default:
            // NOTE(review): message says "output" but this is the input-port
            // EBD path -- wording looks stale; behavior (reject) is intended.
            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
            return false;
    }

    return true;
}

// Offers |info| (which we must own) to the client via kWhatFillThisBuffer,
// unless input EOS has already been queued. Transfers ownership UPSTREAM.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
    notify->setInt32("buffer-id", info->mBufferID);

    // stale metadata must not leak into the next use of this buffer
    info->mData->meta()->clear();
    notify->setBuffer("buffer", info->mData);

    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
    reply->setInt32("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

// The client filled (or returned) an input buffer: validate ownership, then
// either hold it, resubmit it to the component, or signal EOS as appropriate.
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<ABuffer> buffer;
    int32_t err = OK;
    bool eos = false;
    PortMode mode = getPortMode(kPortIndexInput);

    if (!msg->findBuffer("buffer", &buffer)) {
        /* these are unfilled buffers returned by client */
        CHECK(msg->findInt32("err", &err));

        if (err == OK) {
            /* buffers with no errors are returned on MediaCodec.flush */
            mode = KEEP_BUFFERS;
        } else {
            ALOGV("[%s] saw error %d instead of an input buffer",
                 mCodec->mComponentName.c_str(), err);
            eos = true;
        }
        buffer.clear();
    }

    // client may also flag EOS on a real buffer via its metadata
    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // hold the buffer; just latch EOS state if signalled
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                MetadataBufferType metaType = mCodec->mInputMetadataType;
                int32_t isCSD = 0;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    if (mCodec->mIsLegacyVP9Decoder) {
                        ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                        postFillThisBuffer(info);
                        break;
                    }
                    // codec-specific data is submitted as a plain buffer,
                    // never as metadata
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                    metaType = kMetadataBufferTypeInvalid;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                if (buffer != info->mCodecData) {
                    // client buffer differs from the codec-side buffer:
                    // convert (or plain-copy for CSD) into the codec buffer
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                         mCodec->mComponentName.c_str(),
                         bufferID,
                         buffer.get(), info->mCodecData.get());

                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL || isCSD) {
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                         mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                         mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");

                // for metadata-mode input, refresh the handle/graphic-buffer
                // stored in the metadata before submitting to the component
                status_t err2 = OK;
                switch (metaType) {
                case kMetadataBufferTypeInvalid:
                    break;
#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                case kMetadataBufferTypeNativeHandleSource:
                    if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
                        VideoNativeHandleMetadata *vnhmd =
                            (VideoNativeHandleMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateNativeHandleInMeta(
                                mCodec->mNode, kPortIndexInput,
                                NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */),
                                bufferID);
                    }
                    break;
                case kMetadataBufferTypeANWBuffer:
                    if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
                        VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateGraphicBufferInMeta(
                                mCodec->mNode, kPortIndexInput,
                                new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */),
                                bufferID);
                    }
                    break;
#endif
                default:
                    ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode",
                            asString(metaType), info->mCodecData->size(),
                            sizeof(buffer_handle_t) * 8);
                    err2 = ERROR_UNSUPPORTED;
                    break;
                }

                if (err2 == OK) {
                    err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        info->mCodecData->size(),
                        flags,
                        timeUs,
                        info->mFenceFd);
                }
                // fence ownership transferred to emptyBuffer (or dropped)
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                         mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // no buffer came back (client error/EOS): submit an empty
                // buffer carrying only the EOS flag
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                         mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                         mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                     mCodec->mComponentName.c_str(), bufferID);

                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        0,
                        OMX_BUFFERFLAG_EOS,
                        0,
                        info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// Finds one input buffer we currently own and offers it to the client.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

// The component produced an output buffer: reclaim ownership, run any output
// conversion, and either resubmit it (empty buffer) or hand it downstream
// via kWhatDrainThisBuffer together with a reply message for its return.
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
         mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;
    status_t err= OK;

#if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        if (fenceFd >= 0) {
            // don't leak the fence fd on the error path
            ::close(fenceFd);
        }
        return true;
    }

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus = BufferInfo::OWNED_BY_US;

    if (info->mRenderInfo != NULL) {
        // The fence for an emptied buffer must have signaled, but there still could be queued
        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
        // as we will soon requeue this buffer to the surface. While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // empty non-EOS buffers (or anything after output EOS) go
            // straight back to the component
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                     mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) {
                // If potentially rendering onto a surface, always save key format data (crop &
                // data space) so that we can set it if and once the buffer is rendered.
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                // output is a metadata struct wrapping a native handle
                native_handle_t *handle = NULL;
                VideoNativeHandleMetadata &nativeMeta =
                    *(VideoNativeHandleMetadata *)info->mData->data();
                if (info->mData->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // handle is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pHandle;
#endif
                }
                info->mData->meta()->setPointer("handle", handle);
                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                info->mData->meta()->setInt32("rangeLength", rangeLength);
            } else if (info->mData == info->mCodecData) {
                info->mData->setRange(rangeOffset, rangeLength);
            } else {
                info->mCodecData->setRange(rangeOffset, rangeLength);
                // in this case we know that mConverter is not null
                status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                        info->mCodecData, info->mData);
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                mCodec->mSkipCutBuffer->submit(info->mData);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setBuffer("buffer", info->mData);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
6013 6014 if (flags & OMX_BUFFERFLAG_EOS) { 6015 ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str()); 6016 6017 sp<AMessage> notify = mCodec->mNotify->dup(); 6018 notify->setInt32("what", CodecBase::kWhatEOS); 6019 notify->setInt32("err", mCodec->mInputEOSResult); 6020 notify->post(); 6021 6022 mCodec->mPortEOS[kPortIndexOutput] = true; 6023 } 6024 break; 6025 } 6026 6027 case FREE_BUFFERS: 6028 err = mCodec->freeBuffer(kPortIndexOutput, index); 6029 if (err != OK) { 6030 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6031 return true; 6032 } 6033 break; 6034 6035 default: 6036 ALOGE("Invalid port mode: %d", mode); 6037 return false; 6038 } 6039 6040 return true; 6041} 6042 6043void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { 6044 IOMX::buffer_id bufferID; 6045 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 6046 ssize_t index; 6047 BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 6048 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 6049 if (status != BufferInfo::OWNED_BY_DOWNSTREAM) { 6050 ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 6051 mCodec->dumpBuffers(kPortIndexOutput); 6052 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6053 return; 6054 } 6055 6056 android_native_rect_t crop; 6057 if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom) 6058 && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) { 6059 mCodec->mLastNativeWindowCrop = crop; 6060 status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop); 6061 ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err); 6062 } 6063 6064 int32_t dataSpace; 6065 if (msg->findInt32("dataspace", &dataSpace) 6066 && dataSpace != mCodec->mLastNativeWindowDataSpace) { 6067 status_t err = native_window_set_buffers_data_space( 6068 mCodec->mNativeWindow.get(), (android_dataspace)dataSpace); 6069 
        // NOTE(review): the cached dataspace is updated even when the set
        // call above failed (failure is only logged) -- presumably to avoid
        // retrying on every buffer; confirm before changing.
        mCodec->mLastNativeWindowDataSpace = dataSpace;
        ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
    }

    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0
            && info->mData != NULL && info->mData->size() != 0) {
        ATRACE_NAME("render");
        // The client wants this buffer to be rendered.

        // save buffers sent to the surface so we can get render time when they return
        int64_t mediaTimeUs = -1;
        info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
        if (mediaTimeUs >= 0) {
            mCodec->mRenderTracker.onFrameQueued(
                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
        }

        int64_t timestampNs = 0;
        if (!msg->findInt64("timestampNs", &timestampNs)) {
            // use media timestamp if client did not request a specific render timestamp
            if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
                ALOGV("using buffer PTS of %lld", (long long)timestampNs);
                timestampNs *= 1000;  // timeUs is microseconds; window wants ns
            }
        }

        status_t err;
        err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
        ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);

        info->checkReadFence("onOutputBufferDrained before queueBuffer");
        // Fence ownership passes to the window on queueBuffer; drop our fd
        // unconditionally so we never double-close it.
        err = mCodec->mNativeWindow->queueBuffer(
                mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
        info->mFenceFd = -1;
        if (err == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            info->mStatus = BufferInfo::OWNED_BY_US;
            // keeping read fence as write fence to avoid clobbering
            info->mIsReadFence = false;
        }
    } else {
        // Buffer is dropped (not rendered).
        if (mCodec->mNativeWindow != NULL &&
            (info->mData == NULL || info->mData->size() != 0)) {
            // move read fence into write fence to avoid clobbering
            info->mIsReadFence = false;
            ATRACE_NAME("frame-drop");
        }
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                         mCodec->mComponentName.c_str(), info->mBufferID);
                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                    status_t err = mCodec->mOMX->fillBuffer(
                            mCodec->mNode, info->mBufferID, info->mFenceFd);
                    info->mFenceFd = -1;
                    if (err == OK) {
                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                    } else {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                }
            }
            break;
        }

        case FREE_BUFFERS:
        {
            status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            }
            break;
        }

        default:
            ALOGE("Invalid port mode: %d", mode);
            return;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Uninitialized: drop the OMX death notifier and reset all
// per-component state so a subsequent allocation starts clean.
void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    if (mDeathNotifier != NULL) {
        mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier);
        mDeathNotifier.clear();
    }

    mCodec->mUsingNativeWindow = false;
    mCodec->mNativeWindow.clear();
    mCodec->mNativeWindowUsageBits = 0;
    mCodec->mNode = 0;
    mCodec->mOMX.clear();
    mCodec->mQuirks = 0;
    mCodec->mFlags = 0;
    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mConverter[0].clear();
    mCodec->mConverter[1].clear();
    mCodec->mComponentName.clear();
}

// Message dispatch while no component is allocated. Shutdown/flush are
// acknowledged immediately since there is nothing to tear down.
bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }

        case ACodec::kWhatAllocateComponent:
        {
            onAllocateComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));
            ALOGW_IF(keepComponentAllocated,
                     "cannot keep component allocated on shutdown in Uninitialized state");

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatReleaseCodecInstance:
        {
            // nothing to do, as we have already signaled shutdown
            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    // One-shot convenience: allocate, configure and start the component.
    if (onAllocateComponent(msg)
            && mCodec->mLoadedState->onConfigureComponent(msg)) {
        mCodec->mLoadedState->onStart();
    }
}

// Allocates an OMX node for the requested component -- either an explicit
// "componentName" or the best match for "mime"/"encoder" -- and moves to
// the Loaded state on success. Returns false (after signalError) on failure.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mNode == 0);

    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    sp<IOMX> omx = client.interface();

    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

    Vector<AString> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        sp<IMediaCodecList> list = MediaCodecList::getInstance();
        if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
            matchingCodecs.add(componentName);
        }
    } else {
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        MediaCodecList::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                0,       // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    // Try candidates in order; the first successful allocateNode wins.
    status_t err = NAME_NOT_FOUND;
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs[matchIndex];
        quirks = MediaCodecList::getQuirksFor(componentName.c_str());

        // Temporarily boost thread priority around the (potentially slow)
        // node allocation, then restore the previous priority.
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        node = 0;
    }

    if (node == 0) {
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                    encoder ? "en" : "de", mime.c_str(), err);
        } else {
            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
        }

        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
        return false;
    }

    mDeathNotifier = new DeathNotifier(notify);
    if (mCodec->mNodeBinder == NULL ||
            mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    notify = new AMessage(kWhatOMXMessageList, mCodec);
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mRenderTracker.setComponentName(componentName);
    mCodec->mFlags = 0;

    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    mCodec->mQuirks = quirks;
    mCodec->mOMX = omx;
    mCodec->mNode = node;

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedState::LoadedState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Loaded: reset per-session EOS/format state and, if a shutdown
// was requested while transitioning here, complete it now.
void ACodec::LoadedState::stateEntered() {
    ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetadataBuffersToSubmit = 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();

    if (mCodec->mShutdownInProgress) {
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;

    mCodec->processDeferredMessages();
}

// Frees the OMX node (unless the caller asked to keep it allocated) and
// acknowledges an explicit shutdown request.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        (void)mCodec->mOMX->freeNode(mCodec->mNode);

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

// Message dispatch in the Loaded state: configure, input-surface setup,
// start, shutdown and flush are all valid here.
bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatSetInputSurface:
        {
            onSetInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);
            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // Nothing allocated yet, so a flush completes trivially.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Configures the allocated component for the given mime type; on success
// broadcasts the resolved input/output formats, on failure signals an error
// and returns false.
bool ACodec::LoadedState::onConfigureComponent(
        const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mNode != 0);

    status_t err = OK;
    AString mime;
    if (!msg->findString("mime", &mime)) {
        err = BAD_VALUE;
    } else {
        err = mCodec->configureCodec(mime.c_str(), msg);
    }
    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d",
              mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentConfigured);
        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
        notify->post();
    }

    return true;
}

// Pushes all optional encoder-surface settings (repeat-frame delay, max PTS
// gap, max fps, time lapse, suspended start, color aspects) to the component
// via internal options. Returns the first failing status, or OK.
status_t ACodec::LoadedState::setupInputSurface() {
    status_t err = OK;

    if (mCodec->mRepeatFrameDelayUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
                &mCodec->mRepeatFrameDelayUs,
                sizeof(mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxPtsGapUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
                &mCodec->mMaxPtsGapUs,
                sizeof(mCodec->mMaxPtsGapUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_FPS,
                &mCodec->mMaxFps,
                sizeof(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        // timeLapse[0]: us per output frame, timeLapse[1]: us per capture.
        int64_t timeLapse[2];
        timeLapse[0] = mCodec->mTimePerFrameUs;
        timeLapse[1] = mCodec->mTimePerCaptureUs;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_TIME_LAPSE,
                &timeLapse[0],
                sizeof(timeLapse));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        bool suspend = true;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_SUSPEND,
                &suspend,
                sizeof(suspend));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    // Advertise whether the consumer reads the buffers with the CPU;
    // failure to query is non-fatal (key simply stays unset).
    uint32_t usageBits;
    if (mCodec->mOMX->getParameter(
            mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    sp<ABuffer> colorAspectsBuffer;
    if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        err
            = mCodec->mOMX->setInternalOption(
                mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
                colorAspectsBuffer->base(), colorAspectsBuffer->capacity());
        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}

// Creates a new encoder input surface on the component and replies with a
// kWhatInputSurfaceCreated message carrying either the producer or an error.
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    sp<IGraphicBufferProducer> bufferProducer;
    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        err = mCodec->mOMX->createInputSurface(
                mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer,
                &mCodec->mInputMetadataType);
        // framework uses ANW buffers internally instead of gralloc handles
        if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        }
    }

    if (err == OK) {
        err = setupInputSurface();
    }

    if (err == OK) {
        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Attaches a client-supplied persistent input surface to the encoder and
// replies with kWhatInputSurfaceAccepted (carrying "err" on failure).
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        err = mCodec->mOMX->setInputSurface(
                mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
                &mCodec->mInputMetadataType);
        // framework uses ANW buffers internally instead of gralloc handles
        if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        }
    }

    if (err == OK) {
        surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
        err = setupInputSurface();
    }

    if (err != OK) {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Kicks off the Loaded -> Idle OMX state transition; buffers are allocated
// once LoadedToIdleState is entered.
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Loaded->Idle: allocate buffers on both ports. On failure, roll
// the component back to Loaded and free whatever we still own.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
              "(error 0x%08x)",
              err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates input buffers first, then output buffers; stops at the first
// failure and returns its status.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Defer until the transition finishes.
            mCodec->deferMessage(msg);
            return true;
        }
        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Expects the OMX_StateIdle command completion; then requests the move to
// Executing. Any other completion is treated as a failed transaction.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

// Transitional state: defer parameter/shutdown requests, trivially complete
// flushes, and wait for the Executing command completion.
bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// On the OMX_StateExecuting completion, resume the executing state and
// switch to it; any other completion is a fatal protocol error.
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    return RESUBMIT_BUFFERS;
}

void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info =
                &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Hands every output buffer we (or, with a surface, we-but-not-the-window)
// own back to the component via fillBuffer. Any ownership inconsistency or
// fillBuffer failure raises FAILED_TRANSACTION.
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        // fillBuffer consumes the fence fd; clear ours either way.
        status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
        info->mFenceFd = -1;
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

// (Re)starts the streaming loop: submits output buffers to the component and
// offers all input buffers we own to the client. Idempotent via mActive.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }
    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

// Main-streaming message dispatch: shutdown/flush state transitions,
// resume, IDR requests, runtime parameters and the EOS workaround.
bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply with the status if the sender asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime parameters (bitrate, start time, frame dropping, sync
// frame requests, operating rate, intra refresh, temporal layers). Returns
// the first hard failure; optional keys log and continue.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMX->setConfig(
                mNode,
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_START_TIME,
                     &skipFramesBeforeUs,
                     sizeof(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        bool suspend = dropInputFrames != 0;

        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_SUSPEND,
                     &suspend,
                     sizeof(suspend));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err =
setOperatingRate(rate, mIsVideo); 7216 if (err != OK) { 7217 ALOGE("Failed to set parameter 'operating-rate' (err %d)", err); 7218 return err; 7219 } 7220 } 7221 7222 int32_t intraRefreshPeriod = 0; 7223 if (params->findInt32("intra-refresh-period", &intraRefreshPeriod) 7224 && intraRefreshPeriod > 0) { 7225 status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false); 7226 if (err != OK) { 7227 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 7228 mComponentName.c_str()); 7229 err = OK; 7230 } 7231 } 7232 7233 status_t err = configureTemporalLayers(params, false /* inConfigure */, mOutputFormat); 7234 if (err != OK) { 7235 err = OK; // ignore failure 7236 } 7237 7238 return err; 7239} 7240 7241void ACodec::onSignalEndOfInputStream() { 7242 sp<AMessage> notify = mNotify->dup(); 7243 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); 7244 7245 status_t err = mOMX->signalEndOfInputStream(mNode); 7246 if (err != OK) { 7247 notify->setInt32("err", err); 7248 } 7249 notify->post(); 7250} 7251 7252bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 7253 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7254 return true; 7255} 7256 7257bool ACodec::ExecutingState::onOMXEvent( 7258 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7259 switch (event) { 7260 case OMX_EventPortSettingsChanged: 7261 { 7262 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 7263 7264 mCodec->onOutputFormatChanged(); 7265 7266 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { 7267 mCodec->mMetadataBuffersToSubmit = 0; 7268 CHECK_EQ(mCodec->mOMX->sendCommand( 7269 mCodec->mNode, 7270 OMX_CommandPortDisable, kPortIndexOutput), 7271 (status_t)OK); 7272 7273 mCodec->freeOutputBuffersNotOwnedByComponent(); 7274 7275 mCodec->changeState(mCodec->mOutputPortSettingsChangedState); 7276 } else if (data2 != OMX_IndexConfigCommonOutputCrop 7277 && data2 != OMX_IndexConfigAndroidIntraRefresh) { 7278 ALOGV("[%s] 
OMX_EventPortSettingsChanged 0x%08x", 7279 mCodec->mComponentName.c_str(), data2); 7280 } 7281 7282 return true; 7283 } 7284 7285 case OMX_EventBufferFlag: 7286 { 7287 return true; 7288 } 7289 7290 default: 7291 return BaseState::onOMXEvent(event, data1, data2); 7292 } 7293} 7294 7295//////////////////////////////////////////////////////////////////////////////// 7296 7297ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState( 7298 ACodec *codec) 7299 : BaseState(codec) { 7300} 7301 7302ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode( 7303 OMX_U32 portIndex) { 7304 if (portIndex == kPortIndexOutput) { 7305 return FREE_BUFFERS; 7306 } 7307 7308 CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput); 7309 7310 return RESUBMIT_BUFFERS; 7311} 7312 7313bool ACodec::OutputPortSettingsChangedState::onMessageReceived( 7314 const sp<AMessage> &msg) { 7315 bool handled = false; 7316 7317 switch (msg->what()) { 7318 case kWhatFlush: 7319 case kWhatShutdown: 7320 case kWhatResume: 7321 case kWhatSetParameters: 7322 { 7323 if (msg->what() == kWhatResume) { 7324 ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str()); 7325 } 7326 7327 mCodec->deferMessage(msg); 7328 handled = true; 7329 break; 7330 } 7331 7332 default: 7333 handled = BaseState::onMessageReceived(msg); 7334 break; 7335 } 7336 7337 return handled; 7338} 7339 7340void ACodec::OutputPortSettingsChangedState::stateEntered() { 7341 ALOGV("[%s] Now handling output port settings change", 7342 mCodec->mComponentName.c_str()); 7343} 7344 7345bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered( 7346 int64_t mediaTimeUs, nsecs_t systemNano) { 7347 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7348 return true; 7349} 7350 7351bool ACodec::OutputPortSettingsChangedState::onOMXEvent( 7352 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7353 switch (event) { 7354 case OMX_EventCmdComplete: 7355 { 7356 if (data1 == (OMX_U32)OMX_CommandPortDisable) { 7357 if 
(data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                // All output buffers must have been returned before the port
                // disable completes; otherwise treat it as a failed transaction.
                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    // Drop the allocator backing the old output buffers.
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                // Re-enable the port and allocate buffers for the new format.
                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from executing->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                // Resume output only if we were actively running before the
                // reconfiguration started.
                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    // The Idle transition has not been confirmed yet, and the cached output
    // format is stale once we start tearing down.
    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

// Waits for the component to confirm the Executing->Idle state transition;
// port/EOS events are ignored since we are shutting down.
bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component reports Idle AND every buffer is back in our hands,
// request Idle->Loaded and free all buffers on both ports.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            // Free both ports; report the first failure encountered.
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            if (err == OK) {
                err = err2;
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display. This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGE("Got flush request in IdleToLoadedState");
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::IdleToLoadedState::stateEntered() {
    ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
}

// Waits for the component to confirm the Idle->Loaded state transition.
bool ACodec::IdleToLoadedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateLoaded) {
                ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->changeState(mCodec->mLoadedState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::FlushingState::FlushingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::FlushingState::stateEntered() {
    ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());

    // Both ports must report flush completion before we can leave this state.
    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
}

bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // Replay shutdown once the flush has completed.
            // NOTE(review): 'handled' is not set here, so this path returns
            // false even though the message was deferred — confirm intended.
            mCodec->deferMessage(msg);
            break;
        }

        case kWhatFlush:
        {
            // We're already doing this right now.
7629 handled = true; 7630 break; 7631 } 7632 7633 default: 7634 handled = BaseState::onMessageReceived(msg); 7635 break; 7636 } 7637 7638 return handled; 7639} 7640 7641bool ACodec::FlushingState::onOMXEvent( 7642 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7643 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7644 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7645 7646 switch (event) { 7647 case OMX_EventCmdComplete: 7648 { 7649 if (data1 != (OMX_U32)OMX_CommandFlush) { 7650 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7651 asString((OMX_COMMANDTYPE)data1), data1, data2); 7652 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7653 return true; 7654 } 7655 7656 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7657 if (mFlushComplete[data2]) { 7658 ALOGW("Flush already completed for %s port", 7659 data2 == kPortIndexInput ? "input" : "output"); 7660 return true; 7661 } 7662 mFlushComplete[data2] = true; 7663 7664 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7665 changeStateIfWeOwnAllBuffers(); 7666 } 7667 } else if (data2 == OMX_ALL) { 7668 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7669 ALOGW("received flush complete event for OMX_ALL before ports have been" 7670 "flushed (%d/%d)", 7671 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7672 return false; 7673 } 7674 7675 changeStateIfWeOwnAllBuffers(); 7676 } else { 7677 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7678 } 7679 7680 return true; 7681 } 7682 7683 case OMX_EventPortSettingsChanged: 7684 { 7685 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7686 msg->setInt32("type", omx_message::EVENT); 7687 msg->setInt32("node", mCodec->mNode); 7688 msg->setInt32("event", event); 7689 msg->setInt32("data1", data1); 7690 msg->setInt32("data2", data2); 7691 7692 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7693 
mCodec->mComponentName.c_str()); 7694 7695 mCodec->deferMessage(msg); 7696 7697 return true; 7698 } 7699 7700 default: 7701 return BaseState::onOMXEvent(event, data1, data2); 7702 } 7703 7704 return true; 7705} 7706 7707void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7708 BaseState::onOutputBufferDrained(msg); 7709 7710 changeStateIfWeOwnAllBuffers(); 7711} 7712 7713void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7714 BaseState::onInputBufferFilled(msg); 7715 7716 changeStateIfWeOwnAllBuffers(); 7717} 7718 7719void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7720 if (mFlushComplete[kPortIndexInput] 7721 && mFlushComplete[kPortIndexOutput] 7722 && mCodec->allYourBuffersAreBelongToUs()) { 7723 // We now own all buffers except possibly those still queued with 7724 // the native window for rendering. Let's get those back as well. 7725 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7726 7727 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7728 7729 sp<AMessage> notify = mCodec->mNotify->dup(); 7730 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7731 notify->post(); 7732 7733 mCodec->mPortEOS[kPortIndexInput] = 7734 mCodec->mPortEOS[kPortIndexOutput] = false; 7735 7736 mCodec->mInputEOSResult = OK; 7737 7738 if (mCodec->mSkipCutBuffer != NULL) { 7739 mCodec->mSkipCutBuffer->clear(); 7740 } 7741 7742 mCodec->changeState(mCodec->mExecutingState); 7743 } 7744} 7745 7746status_t ACodec::queryCapabilities( 7747 const AString &name, const AString &mime, bool isEncoder, 7748 sp<MediaCodecInfo::Capabilities> *caps) { 7749 (*caps).clear(); 7750 const char *role = GetComponentRole(isEncoder, mime.c_str()); 7751 if (role == NULL) { 7752 return BAD_VALUE; 7753 } 7754 7755 OMXClient client; 7756 status_t err = client.connect(); 7757 if (err != OK) { 7758 return err; 7759 } 7760 7761 sp<IOMX> omx = client.interface(); 7762 sp<CodecObserver> observer = new CodecObserver; 7763 
    IOMX::node_id node = 0;

    err = omx->allocateNode(name.c_str(), observer, NULL, &node);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = SetComponentRole(omx, node, role);
    if (err != OK) {
        omx->freeNode(node);
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        // Enumerate supported profile/level pairs until the component runs
        // out (or we hit the kMaxIndicesToCheck safety cap).
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            // If this format has a flexible (e.g. YUV420Flexible) equivalent,
            // report that equivalent first — but only once.
            OMX_U32 flexibleEquivalent;
            if (IsFlexibleColorFormat(
                    omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        if (param.nProfileIndex == 0) {
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        // Probe for tunneled / adaptive playback support on video decoders.
        native_handle_t *sidebandHandle = NULL;
        if (omx->configureVideoTunnelMode(
                node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omx->storeMetaDataInBuffers(
                node, kPortIndexOutput, OMX_TRUE) == OK ||
            omx->prepareForAdaptivePlayback(
                node, kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        // Probe for intra-refresh support on video encoders.
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
        if (omx->getConfig(
                node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omx->freeNode(node);
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
7910//static 7911status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7912 switch (numChannels) { 7913 case 1: 7914 map[0] = OMX_AUDIO_ChannelCF; 7915 break; 7916 case 2: 7917 map[0] = OMX_AUDIO_ChannelLF; 7918 map[1] = OMX_AUDIO_ChannelRF; 7919 break; 7920 case 3: 7921 map[0] = OMX_AUDIO_ChannelLF; 7922 map[1] = OMX_AUDIO_ChannelRF; 7923 map[2] = OMX_AUDIO_ChannelCF; 7924 break; 7925 case 4: 7926 map[0] = OMX_AUDIO_ChannelLF; 7927 map[1] = OMX_AUDIO_ChannelRF; 7928 map[2] = OMX_AUDIO_ChannelLR; 7929 map[3] = OMX_AUDIO_ChannelRR; 7930 break; 7931 case 5: 7932 map[0] = OMX_AUDIO_ChannelLF; 7933 map[1] = OMX_AUDIO_ChannelRF; 7934 map[2] = OMX_AUDIO_ChannelCF; 7935 map[3] = OMX_AUDIO_ChannelLR; 7936 map[4] = OMX_AUDIO_ChannelRR; 7937 break; 7938 case 6: 7939 map[0] = OMX_AUDIO_ChannelLF; 7940 map[1] = OMX_AUDIO_ChannelRF; 7941 map[2] = OMX_AUDIO_ChannelCF; 7942 map[3] = OMX_AUDIO_ChannelLFE; 7943 map[4] = OMX_AUDIO_ChannelLR; 7944 map[5] = OMX_AUDIO_ChannelRR; 7945 break; 7946 case 7: 7947 map[0] = OMX_AUDIO_ChannelLF; 7948 map[1] = OMX_AUDIO_ChannelRF; 7949 map[2] = OMX_AUDIO_ChannelCF; 7950 map[3] = OMX_AUDIO_ChannelLFE; 7951 map[4] = OMX_AUDIO_ChannelLR; 7952 map[5] = OMX_AUDIO_ChannelRR; 7953 map[6] = OMX_AUDIO_ChannelCS; 7954 break; 7955 case 8: 7956 map[0] = OMX_AUDIO_ChannelLF; 7957 map[1] = OMX_AUDIO_ChannelRF; 7958 map[2] = OMX_AUDIO_ChannelCF; 7959 map[3] = OMX_AUDIO_ChannelLFE; 7960 map[4] = OMX_AUDIO_ChannelLR; 7961 map[5] = OMX_AUDIO_ChannelRR; 7962 map[6] = OMX_AUDIO_ChannelLS; 7963 map[7] = OMX_AUDIO_ChannelRS; 7964 break; 7965 default: 7966 return -EINVAL; 7967 } 7968 7969 return OK; 7970} 7971 7972} // namespace android 7973