// ACodec.cpp revision 52fda70b8ed8f03cdb63c40c8eff9734b86f4151
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "ACodec"

#ifdef __LP64__
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif

#include <inttypes.h>
#include <utils/Trace.h>

#include <gui/Surface.h>

#include <media/stagefright/ACodec.h>

#include <binder/MemoryDealer.h>

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>

#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/SurfaceUtils.h>
#include <media/hardware/HardwareAPI.h>

#include <OMX_AudioExt.h>
#include <OMX_VideoExt.h>
#include <OMX_Component.h>
#include <OMX_IndexExt.h>
#include <OMX_AsString.h>

#include "include/avc_utils.h"
#include "include/DataConverter.h"
#include "omx/OMXUtils.h"

namespace android {

enum {
    kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
};

// OMX errors are directly mapped into status_t range if
// there is no corresponding MediaError status code.
// Use the statusFromOMXError(int32_t omxError) function.
//
// Currently this is a direct map.
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors     from 0x90000000 - 0x9000FFFF
// Extension OMX errors  from 0x8F000000 - 0x90000000
// Standard OMX errors   from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
        default:
            return isOMXError(omxError) ? omxError : 0; // no translation required
    }
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Refcounted container for a batch of AMessages so that a whole list of OMX
// callbacks can be attached to one notification message (see CodecObserver).
struct MessageList : public RefBase {
    MessageList() {
    }
    virtual ~MessageList() {
    }
    std::list<sp<AMessage> > &getList() { return mList; }
private:
    std::list<sp<AMessage> > mList;

    DISALLOW_EVIL_CONSTRUCTORS(MessageList);
};

// Returns a process-wide shared plain-copy DataConverter; pthread_once
// guarantees the instance is constructed exactly once across threads.
static sp<DataConverter> getCopyConverter() {
    static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited
    static sp<DataConverter> sCopyConverter; // zero-inited
    pthread_once(&once, [](){ sCopyConverter = new DataConverter(); });
    return sCopyConverter;
}

// IOMXObserver implementation: repackages incoming OMX callbacks into
// AMessages and posts them (batched in a MessageList) to the notification
// message installed via setNotificationMessage(), i.e. onto ACodec's looper.
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessages(const std::list<omx_message> &messages) {
        if (messages.empty()) {
            return;
        }

        sp<AMessage> notify = mNotify->dup();
        bool first = true;
        sp<MessageList> msgList = new MessageList();
        for (std::list<omx_message>::const_iterator it = messages.cbegin();
              it != messages.cend(); ++it) {
            const omx_message &omx_msg = *it;
            if (first) {
                // all messages in one batch come from the same node
                notify->setInt32("node", omx_msg.node);
                first = false;
            }

            sp<AMessage> msg = new AMessage;
            msg->setInt32("type", omx_msg.type);
            switch (omx_msg.type) {
                case omx_message::EVENT:
                {
                    msg->setInt32("event", omx_msg.u.event_data.event);
                    msg->setInt32("data1", omx_msg.u.event_data.data1);
                    msg->setInt32("data2", omx_msg.u.event_data.data2);
                    break;
                }

                case omx_message::EMPTY_BUFFER_DONE:
                {
                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                    msg->setInt32("fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FILL_BUFFER_DONE:
                {
                    msg->setInt32(
                            "buffer", omx_msg.u.extended_buffer_data.buffer);
                    msg->setInt32(
                            "range_offset",
                            omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for all of ACodec's states: default message dispatch,
// OMX-callback handling and buffer bookkeeping shared by every state.
struct ACodec::BaseState : public AState {
    explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts mNotify when the watched binder (the remote OMX/media service) dies.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    explicit DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

// Initial state: no OMX component allocated yet; handles setup and
// component allocation requests.
struct ACodec::UninitializedState : public ACodec::BaseState {
    explicit UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

// Component allocated but not yet configured/started; handles configuration,
// input-surface creation and start/shutdown requests.
struct ACodec::LoadedState : public ACodec::BaseState {
    explicit LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Loaded -> Idle; allocates buffers on both ports.
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    explicit LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Idle -> Executing.
struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    explicit IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Steady running state: buffers are flowing between client, component and
// (for video) the native window.
struct ACodec::ExecutingState : public ACodec::BaseState {
    explicit ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Entered while the output port is being reconfigured after a port-settings-
// changed event.
struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    explicit OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Executing -> Idle during shutdown; waits until all
// buffers are returned to us before moving on.
struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    explicit ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Idle -> Loaded during shutdown.
struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    explicit IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Entered while a flush is in progress; tracks flush completion per port.
struct ACodec::FlushingState : public ACodec::BaseState {
    explicit FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Records fenceFd as the pending write fence for this buffer; warns if it
// clobbers a fence that was never waited on.
void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = false;
}

// Records fenceFd as the pending read fence for this buffer; warns if it
// clobbers a fence that was never waited on.
void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = true;
}

// Logs (but tolerates) a read fence being consumed where a write fence was
// expected.
void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
    if (mFenceFd >= 0 && mIsReadFence) {
        ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
    }
}

// Logs (but tolerates) a write fence being consumed where a read fence was
// expected.
void ACodec::BufferInfo::checkReadFence(const char *dbg) {
    if (mFenceFd >= 0 && !mIsReadFence) {
        ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
    }
}

////////////////////////////////////////////////////////////////////////////////

// All state objects are created up front; the codec starts out in
// UninitializedState.
ACodec::ACodec()
    : mSampleRate(0),
      mQuirks(0),
      mNode(0),
      mUsingNativeWindow(false),
      mNativeWindowUsageBits(0),
      mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
      mIsVideo(false),
      mIsEncoder(false),
      mFatalError(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mIsLegacyVP9Decoder(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mInputMetadataType(kMetadataBufferTypeInvalid),
      mOutputMetadataType(kMetadataBufferTypeInvalid),
      mLegacyAdaptiveExperiment(false),
      mMetadataBuffersToSubmit(0),
      mNumUndequeuedBuffers(0),
      mRepeatFrameDelayUs(-1ll),
      mMaxPtsGapUs(-1ll),
      mMaxFps(-1),
      mTimePerFrameUs(-1ll),
      mTimePerCaptureUs(-1ll),
      mCreateInputBuffersSuspended(false),
      mTunneled(false),
      mDescribeColorAspectsIndex((OMX_INDEXTYPE)0),
      mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) {
    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));

    changeState(mUninitializedState);
}

ACodec::~ACodec() {
}

void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}

// The initiate*/signal* methods below all run asynchronously: they post a
// message to this ACodec's looper and return immediately.
void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(this);
    msg->post();
}

void ACodec::signalSetParameters(const sp<AMessage> &params) {
    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
    msg->setMessage("params", params);
    msg->post();
}

void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent);
    msg->setTarget(this);
    msg->post();
}

void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(this);
    msg->post();
}

// Unlike the initiate*/signal* methods, this call is synchronous: it blocks
// until the state machine has processed the surface change and reports the
// resulting error code.
status_t ACodec::setSurface(const sp<Surface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
    msg->setObject("surface", surface);

    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err == OK) {
        (void)response->findInt32("err", &err);
    }
    return err;
}

void ACodec::initiateCreateInputSurface() {
    (new AMessage(kWhatCreateInputSurface, this))->post();
}

void ACodec::initiateSetInputSurface(
        const sp<PersistentSurface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
    msg->setObject("input-surface", surface);
    msg->post();
}

void ACodec::signalEndOfInputStream() {
    (new AMessage(kWhatSignalEndOfInputStream, this))->post();
}

void ACodec::initiateStart() {
    (new AMessage(kWhatStart, this))->post();
}

void ACodec::signalFlush() {
    ALOGV("[%s] signalFlush", mComponentName.c_str());
    (new AMessage(kWhatFlush, this))->post();
}

void ACodec::signalResume() {
    (new AMessage(kWhatResume, this))->post();
}

void ACodec::initiateShutdown(bool keepComponentAllocated) {
    sp<AMessage> msg = new AMessage(kWhatShutdown, this);
    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
    msg->post();
    if (!keepComponentAllocated) {
        // ensure shutdown completes in 3 seconds
        (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
    }
}

void ACodec::signalRequestIDRFrame() {
    (new AMessage(kWhatRequestIDRFrame, this))->post();
}

// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port. For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
641void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 642 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 643 mMetadataBuffersToSubmit > 0) { 644 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 645 } 646} 647 648status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 649 // allow keeping unset surface 650 if (surface == NULL) { 651 if (mNativeWindow != NULL) { 652 ALOGW("cannot unset a surface"); 653 return INVALID_OPERATION; 654 } 655 return OK; 656 } 657 658 // cannot switch from bytebuffers to surface 659 if (mNativeWindow == NULL) { 660 ALOGW("component was not configured with a surface"); 661 return INVALID_OPERATION; 662 } 663 664 ANativeWindow *nativeWindow = surface.get(); 665 // if we have not yet started the codec, we can simply set the native window 666 if (mBuffers[kPortIndexInput].size() == 0) { 667 mNativeWindow = surface; 668 return OK; 669 } 670 671 // we do not support changing a tunneled surface after start 672 if (mTunneled) { 673 ALOGW("cannot change tunneled surface"); 674 return INVALID_OPERATION; 675 } 676 677 int usageBits = 0; 678 // no need to reconnect as we will not dequeue all buffers 679 status_t err = setupNativeWindowSizeFormatAndUsage( 680 nativeWindow, &usageBits, 681 !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */); 682 if (err != OK) { 683 return err; 684 } 685 686 int ignoredFlags = kVideoGrallocUsage; 687 // New output surface is not allowed to add new usage flag except ignored ones. 688 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 689 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 690 return BAD_VALUE; 691 } 692 693 // get min undequeued count. We cannot switch to a surface that has a higher 694 // undequeued count than we allocated. 
695 int minUndequeuedBuffers = 0; 696 err = nativeWindow->query( 697 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 698 &minUndequeuedBuffers); 699 if (err != 0) { 700 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 701 strerror(-err), -err); 702 return err; 703 } 704 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 705 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 706 minUndequeuedBuffers, mNumUndequeuedBuffers); 707 return BAD_VALUE; 708 } 709 710 // we cannot change the number of output buffers while OMX is running 711 // set up surface to the same count 712 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 713 ALOGV("setting up surface for %zu buffers", buffers.size()); 714 715 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 716 if (err != 0) { 717 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 718 -err); 719 return err; 720 } 721 722 // need to enable allocation when attaching 723 surface->getIGraphicBufferProducer()->allowAllocation(true); 724 725 // for meta data mode, we move dequeud buffers to the new surface. 
726 // for non-meta mode, we must move all registered buffers 727 for (size_t i = 0; i < buffers.size(); ++i) { 728 const BufferInfo &info = buffers[i]; 729 // skip undequeued buffers for meta data mode 730 if (storingMetadataInDecodedBuffers() 731 && !mLegacyAdaptiveExperiment 732 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 733 ALOGV("skipping buffer"); 734 continue; 735 } 736 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 737 738 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 739 if (err != OK) { 740 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 741 info.mGraphicBuffer->getNativeBuffer(), 742 strerror(-err), -err); 743 return err; 744 } 745 } 746 747 // cancel undequeued buffers to new surface 748 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 749 for (size_t i = 0; i < buffers.size(); ++i) { 750 BufferInfo &info = buffers.editItemAt(i); 751 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 752 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 753 err = nativeWindow->cancelBuffer( 754 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 755 info.mFenceFd = -1; 756 if (err != OK) { 757 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 758 info.mGraphicBuffer->getNativeBuffer(), 759 strerror(-err), -err); 760 return err; 761 } 762 } 763 } 764 // disallow further allocation 765 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 766 } 767 768 // push blank buffers to previous window if requested 769 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 770 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 771 } 772 773 mNativeWindow = nativeWindow; 774 mNativeWindowUsageBits = usageBits; 775 return OK; 776} 777 778status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 779 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 780 781 CHECK(mDealer[portIndex] == 
NULL); 782 CHECK(mBuffers[portIndex].isEmpty()); 783 784 status_t err; 785 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { 786 if (storingMetadataInDecodedBuffers()) { 787 err = allocateOutputMetadataBuffers(); 788 } else { 789 err = allocateOutputBuffersFromNativeWindow(); 790 } 791 } else { 792 OMX_PARAM_PORTDEFINITIONTYPE def; 793 InitOMXParams(&def); 794 def.nPortIndex = portIndex; 795 796 err = mOMX->getParameter( 797 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 798 799 if (err == OK) { 800 MetadataBufferType type = 801 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 802 size_t bufSize = def.nBufferSize; 803 if (type == kMetadataBufferTypeANWBuffer) { 804 bufSize = sizeof(VideoNativeMetadata); 805 } else if (type == kMetadataBufferTypeNativeHandleSource) { 806 bufSize = sizeof(VideoNativeHandleMetadata); 807 } 808 809 // If using gralloc or native source input metadata buffers, allocate largest 810 // metadata size as we prefer to generate native source metadata, but component 811 // may require gralloc source. For camera source, allocate at least enough 812 // size for native metadata buffers. 
813 size_t allottedSize = bufSize; 814 if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) { 815 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 816 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 817 bufSize = max(bufSize, sizeof(VideoNativeMetadata)); 818 } 819 820 size_t conversionBufferSize = 0; 821 822 sp<DataConverter> converter = mConverter[portIndex]; 823 if (converter != NULL) { 824 // here we assume sane conversions of max 4:1, so result fits in int32 825 if (portIndex == kPortIndexInput) { 826 conversionBufferSize = converter->sourceSize(bufSize); 827 } else { 828 conversionBufferSize = converter->targetSize(bufSize); 829 } 830 } 831 832 size_t alignment = MemoryDealer::getAllocationAlignment(); 833 834 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port", 835 mComponentName.c_str(), 836 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 837 portIndex == kPortIndexInput ? 
"input" : "output"); 838 839 // verify buffer sizes to avoid overflow in align() 840 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { 841 ALOGE("b/22885421"); 842 return NO_MEMORY; 843 } 844 845 // don't modify bufSize as OMX may not expect it to increase after negotiation 846 size_t alignedSize = align(bufSize, alignment); 847 size_t alignedConvSize = align(conversionBufferSize, alignment); 848 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { 849 ALOGE("b/22885421"); 850 return NO_MEMORY; 851 } 852 853 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); 854 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 855 856 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 857 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 858 if (mem == NULL || mem->pointer() == NULL) { 859 return NO_MEMORY; 860 } 861 862 BufferInfo info; 863 info.mStatus = BufferInfo::OWNED_BY_US; 864 info.mFenceFd = -1; 865 info.mRenderInfo = NULL; 866 info.mNativeHandle = NULL; 867 868 uint32_t requiresAllocateBufferBit = 869 (portIndex == kPortIndexInput) 870 ? kRequiresAllocateBufferOnInputPorts 871 : kRequiresAllocateBufferOnOutputPorts; 872 873 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 874 mem.clear(); 875 876 void *ptr = NULL; 877 sp<NativeHandle> native_handle; 878 err = mOMX->allocateSecureBuffer( 879 mNode, portIndex, bufSize, &info.mBufferID, 880 &ptr, &native_handle); 881 882 // TRICKY: this representation is unorthodox, but ACodec requires 883 // an ABuffer with a proper size to validate range offsets and lengths. 884 // Since mData is never referenced for secure input, it is used to store 885 // either the pointer to the secure buffer, or the opaque handle as on 886 // some devices ptr is actually an opaque handle, not a pointer. 
887 888 // TRICKY2: use native handle as the base of the ABuffer if received one, 889 // because Widevine source only receives these base addresses. 890 const native_handle_t *native_handle_ptr = 891 native_handle == NULL ? NULL : native_handle->handle(); 892 info.mData = new ABuffer( 893 ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize); 894 info.mNativeHandle = native_handle; 895 info.mCodecData = info.mData; 896 } else if (mQuirks & requiresAllocateBufferBit) { 897 err = mOMX->allocateBufferWithBackup( 898 mNode, portIndex, mem, &info.mBufferID, allottedSize); 899 } else { 900 err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize); 901 } 902 903 if (mem != NULL) { 904 info.mCodecData = new ABuffer(mem->pointer(), bufSize); 905 info.mCodecRef = mem; 906 907 if (type == kMetadataBufferTypeANWBuffer) { 908 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 909 } 910 911 // if we require conversion, allocate conversion buffer for client use; 912 // otherwise, reuse codec buffer 913 if (mConverter[portIndex] != NULL) { 914 CHECK_GT(conversionBufferSize, (size_t)0); 915 mem = mDealer[portIndex]->allocate(conversionBufferSize); 916 if (mem == NULL|| mem->pointer() == NULL) { 917 return NO_MEMORY; 918 } 919 info.mData = new ABuffer(mem->pointer(), conversionBufferSize); 920 info.mMemRef = mem; 921 } else { 922 info.mData = info.mCodecData; 923 info.mMemRef = info.mCodecRef; 924 } 925 } 926 927 mBuffers[portIndex].push(info); 928 } 929 } 930 } 931 932 if (err != OK) { 933 return err; 934 } 935 936 sp<AMessage> notify = mNotify->dup(); 937 notify->setInt32("what", CodecBase::kWhatBuffersAllocated); 938 939 notify->setInt32("portIndex", portIndex); 940 941 sp<PortDescription> desc = new PortDescription; 942 943 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 944 const BufferInfo &info = mBuffers[portIndex][i]; 945 desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef); 946 } 947 948 
notify->setObject("portDesc", desc); 949 notify->post(); 950 951 return OK; 952} 953 954status_t ACodec::setupNativeWindowSizeFormatAndUsage( 955 ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */, 956 bool reconnect) { 957 OMX_PARAM_PORTDEFINITIONTYPE def; 958 InitOMXParams(&def); 959 def.nPortIndex = kPortIndexOutput; 960 961 status_t err = mOMX->getParameter( 962 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 963 964 if (err != OK) { 965 return err; 966 } 967 968 OMX_U32 usage = 0; 969 err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage); 970 if (err != 0) { 971 ALOGW("querying usage flags from OMX IL component failed: %d", err); 972 // XXX: Currently this error is logged, but not fatal. 973 usage = 0; 974 } 975 int omxUsage = usage; 976 977 if (mFlags & kFlagIsGrallocUsageProtected) { 978 usage |= GRALLOC_USAGE_PROTECTED; 979 } 980 981 usage |= kVideoGrallocUsage; 982 *finalUsage = usage; 983 984 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 985 mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN; 986 987 ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage); 988 return setNativeWindowSizeFormatAndUsage( 989 nativeWindow, 990 def.format.video.nFrameWidth, 991 def.format.video.nFrameHeight, 992 def.format.video.eColorFormat, 993 mRotationDegrees, 994 usage, 995 reconnect); 996} 997 998status_t ACodec::configureOutputBuffersFromNativeWindow( 999 OMX_U32 *bufferCount, OMX_U32 *bufferSize, 1000 OMX_U32 *minUndequeuedBuffers, bool preregister) { 1001 1002 OMX_PARAM_PORTDEFINITIONTYPE def; 1003 InitOMXParams(&def); 1004 def.nPortIndex = kPortIndexOutput; 1005 1006 status_t err = mOMX->getParameter( 1007 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 1008 1009 if (err == OK) { 1010 err = setupNativeWindowSizeFormatAndUsage( 1011 mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */); 1012 } 1013 if (err != OK) { 1014 mNativeWindowUsageBits = 0; 1015 return err; 
// Negotiates output buffer count and size between the OMX component and the
// native window (consumer).  On success, fills in *bufferCount, *bufferSize
// and *minUndequeuedBuffers.  For tunneled playback, buffer allocation is
// managed by the component itself, so all three outputs are set to 0.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // If the component rejects the count, retry with progressively fewer
    // extra buffers; only fail once even the minimum (extraBuffers == 0)
    // is rejected.
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            // Fold the granted extras into the undequeued count so callers
            // keep that many buffers circulating with the consumer.
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    // Tell the window how many buffers the codec side will be using.
    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}
// Allocates all output buffers by dequeuing graphic buffers from the native
// window and registering each with the OMX component via useGraphicBuffer().
// After registration, the minimum-undequeued set (or, on failure, every
// buffer still owned by us) is cancelled back to the window.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    // In non-metadata mode the actual graphic buffers are allocated here, so
    // temporarily permit the producer to allocate.
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        // No CPU-accessible backing; the data lives in the graphic buffer.
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mCodecData = info.mData;
        info.mGraphicBuffer = graphicBuffer;
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // Preserve the first error; cancel the rest regardless.
            if (err == 0) {
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}
1154 cancelStart = bufferCount - minUndequeuedBuffers; 1155 cancelEnd = bufferCount; 1156 } 1157 1158 for (OMX_U32 i = cancelStart; i < cancelEnd; i++) { 1159 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1160 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1161 status_t error = cancelBufferToNativeWindow(info); 1162 if (err == 0) { 1163 err = error; 1164 } 1165 } 1166 } 1167 1168 if (!storingMetadataInDecodedBuffers()) { 1169 static_cast<Surface*>(mNativeWindow.get()) 1170 ->getIGraphicBufferProducer()->allowAllocation(false); 1171 } 1172 1173 return err; 1174} 1175 1176status_t ACodec::allocateOutputMetadataBuffers() { 1177 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1178 status_t err = configureOutputBuffersFromNativeWindow( 1179 &bufferCount, &bufferSize, &minUndequeuedBuffers, 1180 mLegacyAdaptiveExperiment /* preregister */); 1181 if (err != 0) 1182 return err; 1183 mNumUndequeuedBuffers = minUndequeuedBuffers; 1184 1185 ALOGV("[%s] Allocating %u meta buffers on output port", 1186 mComponentName.c_str(), bufferCount); 1187 1188 size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ? 
1189 sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata); 1190 size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment()); 1191 mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec"); 1192 1193 // Dequeue buffers and send them to OMX 1194 for (OMX_U32 i = 0; i < bufferCount; i++) { 1195 BufferInfo info; 1196 info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1197 info.mFenceFd = -1; 1198 info.mRenderInfo = NULL; 1199 info.mGraphicBuffer = NULL; 1200 info.mDequeuedAt = mDequeueCounter; 1201 1202 sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize); 1203 if (mem == NULL || mem->pointer() == NULL) { 1204 return NO_MEMORY; 1205 } 1206 if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1207 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 1208 } 1209 info.mData = new ABuffer(mem->pointer(), mem->size()); 1210 info.mMemRef = mem; 1211 info.mCodecData = info.mData; 1212 info.mCodecRef = mem; 1213 1214 // we use useBuffer for metadata regardless of quirks 1215 err = mOMX->useBuffer( 1216 mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size()); 1217 mBuffers[kPortIndexOutput].push(info); 1218 1219 ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)", 1220 mComponentName.c_str(), info.mBufferID, mem->pointer()); 1221 } 1222 1223 if (mLegacyAdaptiveExperiment) { 1224 // preallocate and preregister buffers 1225 static_cast<Surface *>(mNativeWindow.get()) 1226 ->getIGraphicBufferProducer()->allowAllocation(true); 1227 1228 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1229 "output port", 1230 mComponentName.c_str(), bufferCount, bufferSize); 1231 1232 // Dequeue buffers then cancel them all 1233 for (OMX_U32 i = 0; i < bufferCount; i++) { 1234 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1235 1236 ANativeWindowBuffer *buf; 1237 int fenceFd; 1238 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1239 if (err != 0) { 
1240 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1241 break; 1242 } 1243 1244 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1245 mOMX->updateGraphicBufferInMeta( 1246 mNode, kPortIndexOutput, graphicBuffer, info->mBufferID); 1247 info->mStatus = BufferInfo::OWNED_BY_US; 1248 info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy"); 1249 info->mGraphicBuffer = graphicBuffer; 1250 } 1251 1252 for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) { 1253 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1254 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1255 status_t error = cancelBufferToNativeWindow(info); 1256 if (err == OK) { 1257 err = error; 1258 } 1259 } 1260 } 1261 1262 static_cast<Surface*>(mNativeWindow.get()) 1263 ->getIGraphicBufferProducer()->allowAllocation(false); 1264 } 1265 1266 mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers; 1267 return err; 1268} 1269 1270status_t ACodec::submitOutputMetadataBuffer() { 1271 CHECK(storingMetadataInDecodedBuffers()); 1272 if (mMetadataBuffersToSubmit == 0) 1273 return OK; 1274 1275 BufferInfo *info = dequeueBufferFromNativeWindow(); 1276 if (info == NULL) { 1277 return ERROR_IO; 1278 } 1279 1280 ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p", 1281 mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get()); 1282 1283 --mMetadataBuffersToSubmit; 1284 info->checkWriteFence("submitOutputMetadataBuffer"); 1285 status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd); 1286 info->mFenceFd = -1; 1287 if (err == OK) { 1288 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 1289 } 1290 1291 return err; 1292} 1293 1294status_t ACodec::waitForFence(int fd, const char *dbg ) { 1295 status_t res = OK; 1296 if (fd >= 0) { 1297 sp<Fence> fence = new Fence(fd); 1298 res = fence->wait(IOMX::kFenceTimeoutMs); 1299 ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg); 1300 } 1301 return res; 1302} 
1303 1304// static 1305const char *ACodec::_asString(BufferInfo::Status s) { 1306 switch (s) { 1307 case BufferInfo::OWNED_BY_US: return "OUR"; 1308 case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT"; 1309 case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM"; 1310 case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM"; 1311 case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE"; 1312 case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED"; 1313 default: return "?"; 1314 } 1315} 1316 1317void ACodec::dumpBuffers(OMX_U32 portIndex) { 1318 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 1319 ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(), 1320 portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size()); 1321 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1322 const BufferInfo &info = mBuffers[portIndex][i]; 1323 ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u", 1324 i, info.mBufferID, info.mGraphicBuffer.get(), 1325 info.mGraphicBuffer == NULL ? 
// Returns a buffer we own back to the native window via cancelBuffer,
// transferring its write fence.  Ownership is switched to
// OWNED_BY_NATIVE_WINDOW even if the cancel itself fails.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    // The fence fd is consumed by cancelBuffer, so forget it afterwards.
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Records render-tracking state for a buffer just dequeued from the native
// window, then flushes any frames whose fences have already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Callback from the framework when a frame was rendered; dumps the render
// queue if the tracker does not recognize the frame.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}

// Collects frames whose render fences have signaled (up to and including
// 'until', if given), unlinks their per-buffer render info, and posts a
// kWhatOutputFramesRendered notification when any frames completed.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}
// Dequeues one graphic buffer from the native window and returns the
// matching BufferInfo.  Known stale buffers (attached by a consumer) are
// discarded and the dequeue retried.  In metadata mode, a genuinely unknown
// buffer replaces the least-recently-dequeued buffer we believe the window
// still holds.  Returns NULL on error, in tunneled mode, or after a fatal
// error.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
              " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        // Search newest-to-oldest for the BufferInfo matching this handle.
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                    info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used.  We can simply ignore this as we have already dequeued
                // this buffer properly.  NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    // Point the component's metadata at the replacement graphic buffer.
    mOMX->updateGraphicBufferInMeta(
            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
            oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}
mRenderTracker.untrackFrame(oldest->mRenderInfo); 1481 oldest->mRenderInfo = NULL; 1482 1483 mOMX->updateGraphicBufferInMeta( 1484 mNode, kPortIndexOutput, oldest->mGraphicBuffer, 1485 oldest->mBufferID); 1486 1487 if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) { 1488 VideoGrallocMetadata *grallocMeta = 1489 reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base()); 1490 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1491 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1492 mDequeueCounter - oldest->mDequeuedAt, 1493 (void *)(uintptr_t)grallocMeta->pHandle, 1494 oldest->mGraphicBuffer->handle, oldest->mData->base()); 1495 } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1496 VideoNativeMetadata *nativeMeta = 1497 reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base()); 1498 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1499 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1500 mDequeueCounter - oldest->mDequeuedAt, 1501 (void *)(uintptr_t)nativeMeta->pBuffer, 1502 oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base()); 1503 } 1504 1505 updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest); 1506 return oldest; 1507} 1508 1509status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) { 1510 status_t err = OK; 1511 for (size_t i = mBuffers[portIndex].size(); i > 0;) { 1512 i--; 1513 status_t err2 = freeBuffer(portIndex, i); 1514 if (err == OK) { 1515 err = err2; 1516 } 1517 } 1518 1519 // clear mDealer even on an error 1520 mDealer[portIndex].clear(); 1521 return err; 1522} 1523 1524status_t ACodec::freeOutputBuffersNotOwnedByComponent() { 1525 status_t err = OK; 1526 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1527 i--; 1528 BufferInfo *info = 1529 &mBuffers[kPortIndexOutput].editItemAt(i); 1530 1531 // At this time some buffers may still be with the component 1532 // or being drained. 
// Frees the buffer at slot i on the given port: returns it to the native
// window if we own it, releases it from the OMX component, closes any
// lingering fence, and drops its render-tracking state.  The slot is
// removed from mBuffers even if the component-side free fails.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            // Buffers we own go back to the window first, then to the
            // component; cancel failures are deliberately ignored here.
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMX->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}
&mBuffers[portIndex].editItemAt(i); 1596 1597 if (info->mBufferID == bufferID) { 1598 if (index != NULL) { 1599 *index = i; 1600 } 1601 return info; 1602 } 1603 } 1604 1605 ALOGE("Could not find buffer with ID %u", bufferID); 1606 return NULL; 1607} 1608 1609status_t ACodec::setComponentRole( 1610 bool isEncoder, const char *mime) { 1611 const char *role = GetComponentRole(isEncoder, mime); 1612 if (role == NULL) { 1613 return BAD_VALUE; 1614 } 1615 status_t err = SetComponentRole(mOMX, mNode, role); 1616 if (err != OK) { 1617 ALOGW("[%s] Failed to set standard component role '%s'.", 1618 mComponentName.c_str(), role); 1619 } 1620 return err; 1621} 1622 1623status_t ACodec::configureCodec( 1624 const char *mime, const sp<AMessage> &msg) { 1625 int32_t encoder; 1626 if (!msg->findInt32("encoder", &encoder)) { 1627 encoder = false; 1628 } 1629 1630 sp<AMessage> inputFormat = new AMessage; 1631 sp<AMessage> outputFormat = new AMessage; 1632 mConfigFormat = msg; 1633 1634 mIsEncoder = encoder; 1635 1636 mInputMetadataType = kMetadataBufferTypeInvalid; 1637 mOutputMetadataType = kMetadataBufferTypeInvalid; 1638 1639 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1640 1641 if (err != OK) { 1642 return err; 1643 } 1644 1645 int32_t bitRate = 0; 1646 // FLAC encoder doesn't need a bitrate, other encoders do 1647 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1648 && !msg->findInt32("bitrate", &bitRate)) { 1649 return INVALID_OPERATION; 1650 } 1651 1652 // propagate bitrate to the output so that the muxer has it 1653 if (encoder && msg->findInt32("bitrate", &bitRate)) { 1654 // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the 1655 // average bitrate. We've been setting both bitrate and max-bitrate to this same value. 
1656 outputFormat->setInt32("bitrate", bitRate); 1657 outputFormat->setInt32("max-bitrate", bitRate); 1658 } 1659 1660 int32_t storeMeta; 1661 if (encoder 1662 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1663 && storeMeta != kMetadataBufferTypeInvalid) { 1664 mInputMetadataType = (MetadataBufferType)storeMeta; 1665 err = mOMX->storeMetaDataInBuffers( 1666 mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1667 if (err != OK) { 1668 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1669 mComponentName.c_str(), err); 1670 1671 return err; 1672 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1673 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1674 // IOMX translates ANWBuffers to gralloc source already. 1675 mInputMetadataType = (MetadataBufferType)storeMeta; 1676 } 1677 1678 uint32_t usageBits; 1679 if (mOMX->getParameter( 1680 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1681 &usageBits, sizeof(usageBits)) == OK) { 1682 inputFormat->setInt32( 1683 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1684 } 1685 } 1686 1687 int32_t prependSPSPPS = 0; 1688 if (encoder 1689 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1690 && prependSPSPPS != 0) { 1691 OMX_INDEXTYPE index; 1692 err = mOMX->getExtensionIndex( 1693 mNode, 1694 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1695 &index); 1696 1697 if (err == OK) { 1698 PrependSPSPPSToIDRFramesParams params; 1699 InitOMXParams(¶ms); 1700 params.bEnable = OMX_TRUE; 1701 1702 err = mOMX->setParameter( 1703 mNode, index, ¶ms, sizeof(params)); 1704 } 1705 1706 if (err != OK) { 1707 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1708 "IDR frames. 
(err %d)", err); 1709 1710 return err; 1711 } 1712 } 1713 1714 // Only enable metadata mode on encoder output if encoder can prepend 1715 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1716 // opaque handle, to which we don't have access. 1717 int32_t video = !strncasecmp(mime, "video/", 6); 1718 mIsVideo = video; 1719 if (encoder && video) { 1720 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1721 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1722 && storeMeta != 0); 1723 1724 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1725 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1726 if (err != OK) { 1727 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1728 mComponentName.c_str(), err); 1729 } 1730 1731 if (!msg->findInt64( 1732 "repeat-previous-frame-after", 1733 &mRepeatFrameDelayUs)) { 1734 mRepeatFrameDelayUs = -1ll; 1735 } 1736 1737 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1738 mMaxPtsGapUs = -1ll; 1739 } 1740 1741 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1742 mMaxFps = -1; 1743 } 1744 1745 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1746 mTimePerCaptureUs = -1ll; 1747 } 1748 1749 if (!msg->findInt32( 1750 "create-input-buffers-suspended", 1751 (int32_t*)&mCreateInputBuffersSuspended)) { 1752 mCreateInputBuffersSuspended = false; 1753 } 1754 } 1755 1756 // NOTE: we only use native window for video decoders 1757 sp<RefBase> obj; 1758 bool haveNativeWindow = msg->findObject("native-window", &obj) 1759 && obj != NULL && video && !encoder; 1760 mUsingNativeWindow = haveNativeWindow; 1761 mLegacyAdaptiveExperiment = false; 1762 if (video && !encoder) { 1763 inputFormat->setInt32("adaptive-playback", false); 1764 1765 int32_t usageProtected; 1766 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1767 if (!haveNativeWindow) { 1768 ALOGE("protected output buffers must be sent to an 
ANativeWindow"); 1769 return PERMISSION_DENIED; 1770 } 1771 mFlags |= kFlagIsGrallocUsageProtected; 1772 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1773 } 1774 1775 if (mFlags & kFlagIsSecure) { 1776 // use native_handles for secure input buffers 1777 err = mOMX->enableNativeBuffers( 1778 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1779 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1780 err = OK; // ignore error for now 1781 } 1782 } 1783 if (haveNativeWindow) { 1784 sp<ANativeWindow> nativeWindow = 1785 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1786 1787 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1788 int32_t autoFrc; 1789 if (msg->findInt32("auto-frc", &autoFrc)) { 1790 bool enabled = autoFrc; 1791 OMX_CONFIG_BOOLEANTYPE config; 1792 InitOMXParams(&config); 1793 config.bEnabled = (OMX_BOOL)enabled; 1794 status_t temp = mOMX->setConfig( 1795 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1796 &config, sizeof(config)); 1797 if (temp == OK) { 1798 outputFormat->setInt32("auto-frc", enabled); 1799 } else if (enabled) { 1800 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1801 } 1802 } 1803 // END of temporary support for automatic FRC 1804 1805 int32_t tunneled; 1806 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1807 tunneled != 0) { 1808 ALOGI("Configuring TUNNELED video playback."); 1809 mTunneled = true; 1810 1811 int32_t audioHwSync = 0; 1812 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1813 ALOGW("No Audio HW Sync provided for video tunnel"); 1814 } 1815 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1816 if (err != OK) { 1817 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1818 audioHwSync, nativeWindow.get()); 1819 return err; 1820 } 1821 1822 int32_t maxWidth = 0, maxHeight = 0; 1823 if (msg->findInt32("max-width", &maxWidth) && 1824 msg->findInt32("max-height", &maxHeight)) { 1825 
1826 err = mOMX->prepareForAdaptivePlayback( 1827 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1828 if (err != OK) { 1829 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1830 mComponentName.c_str(), err); 1831 // allow failure 1832 err = OK; 1833 } else { 1834 inputFormat->setInt32("max-width", maxWidth); 1835 inputFormat->setInt32("max-height", maxHeight); 1836 inputFormat->setInt32("adaptive-playback", true); 1837 } 1838 } 1839 } else { 1840 ALOGV("Configuring CPU controlled video playback."); 1841 mTunneled = false; 1842 1843 // Explicity reset the sideband handle of the window for 1844 // non-tunneled video in case the window was previously used 1845 // for a tunneled video playback. 1846 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1847 if (err != OK) { 1848 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1849 return err; 1850 } 1851 1852 // Always try to enable dynamic output buffers on native surface 1853 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1854 err = mOMX->storeMetaDataInBuffers( 1855 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1856 if (err != OK) { 1857 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1858 mComponentName.c_str(), err); 1859 1860 // if adaptive playback has been requested, try JB fallback 1861 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1862 // LARGE MEMORY REQUIREMENT 1863 1864 // we will not do adaptive playback on software accessed 1865 // surfaces as they never had to respond to changes in the 1866 // crop window, and we don't trust that they will be able to. 
1867 int usageBits = 0; 1868 bool canDoAdaptivePlayback; 1869 1870 if (nativeWindow->query( 1871 nativeWindow.get(), 1872 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1873 &usageBits) != OK) { 1874 canDoAdaptivePlayback = false; 1875 } else { 1876 canDoAdaptivePlayback = 1877 (usageBits & 1878 (GRALLOC_USAGE_SW_READ_MASK | 1879 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1880 } 1881 1882 int32_t maxWidth = 0, maxHeight = 0; 1883 if (canDoAdaptivePlayback && 1884 msg->findInt32("max-width", &maxWidth) && 1885 msg->findInt32("max-height", &maxHeight)) { 1886 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1887 mComponentName.c_str(), maxWidth, maxHeight); 1888 1889 err = mOMX->prepareForAdaptivePlayback( 1890 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1891 maxHeight); 1892 ALOGW_IF(err != OK, 1893 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1894 mComponentName.c_str(), err); 1895 1896 if (err == OK) { 1897 inputFormat->setInt32("max-width", maxWidth); 1898 inputFormat->setInt32("max-height", maxHeight); 1899 inputFormat->setInt32("adaptive-playback", true); 1900 } 1901 } 1902 // allow failure 1903 err = OK; 1904 } else { 1905 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1906 mComponentName.c_str()); 1907 CHECK(storingMetadataInDecodedBuffers()); 1908 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1909 "legacy-adaptive", !msg->contains("no-experiments")); 1910 1911 inputFormat->setInt32("adaptive-playback", true); 1912 } 1913 1914 int32_t push; 1915 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1916 && push != 0) { 1917 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1918 } 1919 } 1920 1921 int32_t rotationDegrees; 1922 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1923 mRotationDegrees = rotationDegrees; 1924 } else { 1925 mRotationDegrees = 0; 1926 } 1927 } 1928 1929 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 1930 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 1931 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 1932 1933 if (video) { 1934 // determine need for software renderer 1935 bool usingSwRenderer = false; 1936 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1937 usingSwRenderer = true; 1938 haveNativeWindow = false; 1939 } 1940 1941 if (encoder) { 1942 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 1943 } else { 1944 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 1945 } 1946 1947 if (err != OK) { 1948 return err; 1949 } 1950 1951 if (haveNativeWindow) { 1952 mNativeWindow = static_cast<Surface *>(obj.get()); 1953 } 1954 1955 // initialize native window now to get actual output format 1956 // TODO: this is needed for some encoders even though they don't use native window 1957 err = initNativeWindow(); 1958 if (err != OK) { 1959 return err; 1960 } 1961 1962 // fallback for devices that do not handle flex-YUV for native buffers 1963 if (haveNativeWindow) { 1964 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 1965 if (msg->findInt32("color-format", &requestedColorFormat) && 1966 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 1967 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 1968 if (err != OK) { 1969 return err; 1970 } 1971 int32_t colorFormat = OMX_COLOR_FormatUnused; 1972 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 1973 if (!outputFormat->findInt32("color-format", &colorFormat)) { 1974 ALOGE("ouptut port did not have a color format (wrong domain?)"); 1975 return BAD_VALUE; 1976 } 1977 ALOGD("[%s] Requested output format %#x and got %#x.", 1978 mComponentName.c_str(), requestedColorFormat, colorFormat); 1979 if (!IsFlexibleColorFormat( 1980 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 1981 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 1982 // device did not handle flex-YUV request for native window, fall back 1983 // to SW renderer 1984 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 1985 mNativeWindow.clear(); 1986 mNativeWindowUsageBits = 0; 1987 haveNativeWindow = false; 1988 usingSwRenderer = true; 1989 if (storingMetadataInDecodedBuffers()) { 1990 err = mOMX->storeMetaDataInBuffers( 1991 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 1992 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 1993 // TODO: implement adaptive-playback support for bytebuffer mode. 1994 // This is done by SW codecs, but most HW codecs don't support it. 1995 inputFormat->setInt32("adaptive-playback", false); 1996 } 1997 if (err == OK) { 1998 err = mOMX->enableNativeBuffers( 1999 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2000 } 2001 if (mFlags & kFlagIsGrallocUsageProtected) { 2002 // fallback is not supported for protected playback 2003 err = PERMISSION_DENIED; 2004 } else if (err == OK) { 2005 err = setupVideoDecoder( 2006 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2007 } 2008 } 2009 } 2010 } 2011 2012 if (usingSwRenderer) { 2013 outputFormat->setInt32("using-sw-renderer", 1); 2014 } 2015 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2016 int32_t numChannels, sampleRate; 2017 if (!msg->findInt32("channel-count", &numChannels) 2018 || !msg->findInt32("sample-rate", &sampleRate)) { 2019 // Since we did not always check for these, leave them optional 2020 // and have the decoder figure it all out. 2021 err = OK; 2022 } else { 2023 err = setupRawAudioFormat( 2024 encoder ? 
kPortIndexInput : kPortIndexOutput, 2025 sampleRate, 2026 numChannels); 2027 } 2028 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2029 int32_t numChannels, sampleRate; 2030 if (!msg->findInt32("channel-count", &numChannels) 2031 || !msg->findInt32("sample-rate", &sampleRate)) { 2032 err = INVALID_OPERATION; 2033 } else { 2034 int32_t isADTS, aacProfile; 2035 int32_t sbrMode; 2036 int32_t maxOutputChannelCount; 2037 int32_t pcmLimiterEnable; 2038 drcParams_t drc; 2039 if (!msg->findInt32("is-adts", &isADTS)) { 2040 isADTS = 0; 2041 } 2042 if (!msg->findInt32("aac-profile", &aacProfile)) { 2043 aacProfile = OMX_AUDIO_AACObjectNull; 2044 } 2045 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2046 sbrMode = -1; 2047 } 2048 2049 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2050 maxOutputChannelCount = -1; 2051 } 2052 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2053 // value is unknown 2054 pcmLimiterEnable = -1; 2055 } 2056 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2057 // value is unknown 2058 drc.encodedTargetLevel = -1; 2059 } 2060 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2061 // value is unknown 2062 drc.drcCut = -1; 2063 } 2064 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2065 // value is unknown 2066 drc.drcBoost = -1; 2067 } 2068 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2069 // value is unknown 2070 drc.heavyCompression = -1; 2071 } 2072 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2073 // value is unknown 2074 drc.targetRefLevel = -1; 2075 } 2076 2077 err = setupAACCodec( 2078 encoder, numChannels, sampleRate, bitRate, aacProfile, 2079 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2080 pcmLimiterEnable); 2081 } 2082 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2083 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2084 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2085 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2086 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2087 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2088 // These are PCM-like formats with a fixed sample rate but 2089 // a variable number of channels. 2090 2091 int32_t numChannels; 2092 if (!msg->findInt32("channel-count", &numChannels)) { 2093 err = INVALID_OPERATION; 2094 } else { 2095 int32_t sampleRate; 2096 if (!msg->findInt32("sample-rate", &sampleRate)) { 2097 sampleRate = 8000; 2098 } 2099 err = setupG711Codec(encoder, sampleRate, numChannels); 2100 } 2101 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2102 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2103 if (encoder && 2104 (!msg->findInt32("channel-count", &numChannels) 2105 || !msg->findInt32("sample-rate", &sampleRate))) { 2106 ALOGE("missing channel count or sample rate for FLAC encoder"); 2107 err = INVALID_OPERATION; 2108 } else { 2109 if (encoder) { 2110 if (!msg->findInt32( 2111 "complexity", &compressionLevel) && 2112 !msg->findInt32( 2113 "flac-compression-level", &compressionLevel)) { 2114 compressionLevel = 5; // default FLAC compression level 2115 } else if (compressionLevel < 0) { 2116 ALOGW("compression level %d outside [0..8] range, " 2117 "using 0", 2118 compressionLevel); 2119 compressionLevel = 0; 2120 } else if (compressionLevel > 8) { 2121 ALOGW("compression level %d outside [0..8] range, " 2122 "using 8", 2123 compressionLevel); 2124 compressionLevel = 8; 2125 } 2126 } 2127 err = setupFlacCodec( 2128 encoder, numChannels, sampleRate, compressionLevel); 2129 } 2130 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2131 int32_t numChannels, sampleRate; 2132 if (encoder 2133 || !msg->findInt32("channel-count", &numChannels) 2134 || !msg->findInt32("sample-rate", &sampleRate)) { 2135 err = INVALID_OPERATION; 2136 } else { 2137 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2138 } 2139 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2140 int32_t numChannels; 2141 int32_t sampleRate; 2142 if (!msg->findInt32("channel-count", &numChannels) 2143 || !msg->findInt32("sample-rate", &sampleRate)) { 2144 err = INVALID_OPERATION; 2145 } else { 2146 err = setupAC3Codec(encoder, numChannels, sampleRate); 2147 } 2148 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2149 int32_t numChannels; 2150 int32_t sampleRate; 2151 if (!msg->findInt32("channel-count", &numChannels) 2152 || !msg->findInt32("sample-rate", &sampleRate)) { 2153 err = INVALID_OPERATION; 2154 } else { 2155 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2156 } 2157 } 2158 2159 if (err != OK) { 2160 return err; 2161 } 2162 2163 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2164 mEncoderDelay = 0; 2165 } 2166 2167 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2168 mEncoderPadding = 0; 2169 } 2170 2171 if (msg->findInt32("channel-mask", &mChannelMask)) { 2172 mChannelMaskPresent = true; 2173 } else { 2174 mChannelMaskPresent = false; 2175 } 2176 2177 int32_t maxInputSize; 2178 if (msg->findInt32("max-input-size", &maxInputSize)) { 2179 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2180 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2181 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2182 } 2183 2184 int32_t priority; 2185 if (msg->findInt32("priority", &priority)) { 2186 err = setPriority(priority); 2187 } 2188 2189 int32_t rateInt = -1; 2190 float rateFloat = -1; 2191 if (!msg->findFloat("operating-rate", &rateFloat)) { 2192 msg->findInt32("operating-rate", &rateInt); 2193 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
    }
    // Non-positive operating rates are ignored (unset / not requested).
    if (rateFloat > 0) {
        err = setOperatingRate(rateFloat, video);
    }

    // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
    mBaseOutputFormat = outputFormat;
    // trigger a kWhatOutputFormatChanged msg on first buffer
    mLastOutputFormat.clear();

    // Read back the actual formats from both ports; commit them to
    // mInputFormat/mOutputFormat only if both queries succeed.
    err = getPortFormat(kPortIndexInput, inputFormat);
    if (err == OK) {
        err = getPortFormat(kPortIndexOutput, outputFormat);
        if (err == OK) {
            mInputFormat = inputFormat;
            mOutputFormat = outputFormat;
        }
    }

    // create data converters if needed
    if (!video && err == OK) {
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        if (encoder) {
            // Encoder: convert from the client's PCM encoding to the codec's.
            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
            if (mConverter[kPortIndexInput] != NULL) {
                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        } else {
            // Decoder: convert from the codec's PCM encoding to the client's.
            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
            if (mConverter[kPortIndexOutput] != NULL) {
                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        }
    }

    return err;
}

// Forwards the "priority" configure() key to the component via
// OMX_IndexConfigPriority. An unsupported config is only logged, so this
// returns OK for any non-negative priority.
status_t ACodec::setPriority(int32_t priority) {
    if (priority < 0) {
        return BAD_VALUE;
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = (OMX_U32)priority;
    status_t temp = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
            &config, sizeof(config));
    if (temp != OK) {
        ALOGI("codec does not support config priority (err %d)", temp);
    }
    return OK;
}

// Applies the "operating-rate" setting via OMX_IndexConfigOperatingRate.
// Video rates are sent in Q16 fixed point (hence the 65535 cap and 65536
// scale); audio rates are sent as plain Hz. As with priority, an unsupported
// config is only logged and OK is returned.
status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
    if (rateFloat < 0) {
        return BAD_VALUE;
    }
    OMX_U32 rate;
    if (isVideo) {
        if (rateFloat > 65535) {
            return BAD_VALUE;
        }
        // fps -> Q16 with rounding
        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
    } else {
        if (rateFloat > UINT_MAX) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat);
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = rate;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
            &config, sizeof(config));
    if (err != OK) {
        ALOGI("codec does not support config operating rate (err %d)", err);
    }
    return OK;
}

// Queries the encoder's intra-refresh period (in frames). Tries the Android
// extension config first; on failure falls back to the standard OMX cyclic
// intra-refresh parameter and converts its macroblock count into a period.
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        // Component does not report cyclic intra refresh: treat as disabled.
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);

    return OK;
}

// Sets the intra-refresh period (frames between refreshes; 0 disables it).
// Prefers the Android extension config; if that fails and we are still in
// configure state, falls back to the standard cyclic intra-refresh parameter,
// translating the period into a per-frame macroblock count.
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
    if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}

// Parses the "ts-schema" key ("webrtc.vp8.N-layer" or "android.generic.N[+M]")
// and configures temporal layering on the output port, clamping the requested
// layer counts to what the component advertises. On success the negotiated
// schema is written back into outputFormat.
status_t ACodec::configureTemporalLayers(
        const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) {
    if (!mIsVideo || !mIsEncoder) {
        return INVALID_OPERATION;
    }

    AString tsSchema;
    if (!msg->findString("ts-schema", &tsSchema)) {
        return OK;
    }

    unsigned int numLayers = 0;
    unsigned int numBLayers = 0;
    int tags;
    char dummy;
    OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern =
        OMX_VIDEO_AndroidTemporalLayeringPatternNone;
    // sscanf must return exactly 1: the trailing %c is required NOT to match,
    // which rejects any junk after "-layer".
    if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
            && numLayers > 0) {
        pattern =
                OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
    } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
                    &numLayers, &dummy, &numBLayers, &dummy))
            && (tags == 1 || (tags == 3 && dummy == '+'))
            && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
        // total layer count = P layers requested + B layers requested
        numLayers += numBLayers;
        pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
    } else {
        ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
        return BAD_VALUE;
    }

    OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams;
    InitOMXParams(&layerParams);
    layerParams.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
            &layerParams, sizeof(layerParams));

    if (err != OK) {
        return err;
    } else if (!(layerParams.eSupportedPatterns & pattern)) {
        return BAD_VALUE;
    }

    // clamp to the component-advertised maximums
    numLayers = min(numLayers, layerParams.nLayerCountMax);
    numBLayers = min(numBLayers, layerParams.nBLayerCountMax);

    if (!inConfigure) {
        // Past configure state only setConfig is permitted.
        OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig;
        InitOMXParams(&layerConfig);
        layerConfig.nPortIndex = kPortIndexOutput;
        layerConfig.ePattern = pattern;
        layerConfig.nPLayerCountActual = numLayers - numBLayers;
        layerConfig.nBLayerCountActual = numBLayers;
        layerConfig.bBitrateRatiosSpecified = OMX_FALSE;

        err = mOMX->setConfig(
                mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering,
                &layerConfig, sizeof(layerConfig));
    } else {
        layerParams.ePattern = pattern;
        layerParams.nPLayerCountActual = numLayers - numBLayers;
        layerParams.nBLayerCountActual = numBLayers;
        layerParams.bBitrateRatiosSpecified = OMX_FALSE;

        err = mOMX->setParameter(
                mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
                &layerParams, sizeof(layerParams));
    }

    // Render the (possibly clamped) configuration back into schema form.
    AString configSchema;
    if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) {
        configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers);
    } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) {
        configSchema = AStringPrintf("webrtc.vp8.%u", numLayers);
    }

    if (err != OK) {
        ALOGW("Failed to set temporal layers to %s (requested %s)",
                configSchema.c_str(), tsSchema.c_str());
        return err;
    }

    err = mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
            &layerParams, sizeof(layerParams));

    if (err == OK) {
        ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)",
                tsSchema.c_str(), configSchema.c_str(),
                asString(layerParams.ePattern), layerParams.ePattern,
                layerParams.nPLayerCountActual, layerParams.nBLayerCountActual);

        if (outputFormat.get() == mOutputFormat.get()) {
            mOutputFormat = mOutputFormat->dup(); // trigger an output format change event
        }
        // assume we got what we configured
        outputFormat->setString("ts-schema", configSchema);
    }
    return err;
}

// Raises the port's buffer size to at least |size| (never shrinks it), then
// reads the value back because components may silently ignore the request.
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize >= size) {
        return OK;
    }

    def.nBufferSize = size;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // verify the component actually honored the new size
    if (def.nBufferSize < size) {
        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
        return FAILED_TRANSACTION;
    }

    return OK;
}

// Walks the audio port-format list (at most kMaxIndicesToCheck entries)
// looking for |desiredFormat| and selects it on the port.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eEncoding), format.eEncoding);
            return ERROR_UNSUPPORTED;
        }
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}

// Configures AAC. Encoders get raw PCM on the input port and AAC (profile,
// bitrate, SBR tool selection) on the output port; decoders get AAC on the
// input port (ADTS or raw MP4FF framing) plus optional DRC/presentation
// parameters. An encoder producing ADTS output is not supported.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        return -EINVAL;
    }

    status_t err = setupRawAudioFormat(
            encoder ?
                kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // Map the requested SBR mode onto the Android SSBR/DSBR tool bits.
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // Decoder path: configure the AAC input port.
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // DRC/presentation values of -1 mean "unknown" (the caller passes -1 for
    // any key absent from the configure message).
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    mSampleRate = sampleRate;
    return res;
}

// Configures AC3 decoding: raw PCM on the output port, AC3 parameters on the
// input port. Encoding is not supported.
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ?
                kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("AC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));
}

// Configures E-AC3 decoding; mirrors setupAC3Codec. Encoding is not
// supported.
status_t ACodec::setupEAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("EAC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));
}

// Maps a target bitrate (bits/sec) to the lowest AMR band mode whose bitrate
// is at or above it: WB0..WB8 for AMR-WB, NB0..NB7 for AMR-NB. Rates above
// the top threshold clamp to the highest mode.
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
        bool isAMRWB, int32_t bps) {
    if (isAMRWB) {
        if (bps <= 6600) {
            return OMX_AUDIO_AMRBandModeWB0;
        } else if (bps <= 8850) {
            return OMX_AUDIO_AMRBandModeWB1;
        } else if (bps <= 12650) {
            return OMX_AUDIO_AMRBandModeWB2;
        } else if (bps <= 14250) {
            return OMX_AUDIO_AMRBandModeWB3;
        } else if (bps <= 15850) {
            return OMX_AUDIO_AMRBandModeWB4;
        } else if (bps <= 18250) {
            return OMX_AUDIO_AMRBandModeWB5;
        } else if (bps <= 19850) {
            return OMX_AUDIO_AMRBandModeWB6;
        } else if (bps <= 23050) {
            return OMX_AUDIO_AMRBandModeWB7;
        }

        // 23850 bps
        return OMX_AUDIO_AMRBandModeWB8;
    } else { // AMRNB
        if (bps <= 4750) {
            return OMX_AUDIO_AMRBandModeNB0;
        } else if (bps <= 5150) {
            return OMX_AUDIO_AMRBandModeNB1;
        } else if (bps <= 5900) {
            return OMX_AUDIO_AMRBandModeNB2;
        } else if (bps <= 6700) {
            return OMX_AUDIO_AMRBandModeNB3;
        } else if (bps <= 7400) {
            return OMX_AUDIO_AMRBandModeNB4;
        } else if (bps <= 7950) {
            return OMX_AUDIO_AMRBandModeNB5;
        } else if (bps <= 10200) {
            return OMX_AUDIO_AMRBandModeNB6;
        }

        // 12200 bps
        return OMX_AUDIO_AMRBandModeNB7;
    }
}

// Configures AMR (narrow- or wide-band): FSF frame format plus a band mode
// picked from the bitrate on the coded port, then raw mono PCM at the AMR
// sample rate (16 kHz WB / 8 kHz NB) on the uncoded port.
status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
    OMX_AUDIO_PARAM_AMRTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;

    status_t err =
        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            isWAMR ?
                16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// G.711 (handled as a-law/mu-law elsewhere) is decode-only here; the
// component handles the coding, we only set up raw PCM on the input port.
status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
    if (encoder) {
        return INVALID_OPERATION;
    }

    return setupRawAudioFormat(
            kPortIndexInput, sampleRate, numChannels);
}

// Configures FLAC. For encoders the compression level (already clamped to
// [0..8] by the caller) is applied on the output port; both directions then
// get raw PCM on the uncoded port.
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Sets up linear PCM on |portIndex|: forces OMX_AUDIO_CodingPCM in the port
// definition, then fills in channel count, sample rate, interleaving, channel
// mapping and the sample representation for |encoding| (8-bit unsigned,
// 16-bit signed, or 32-bit float). If the component rejects a non-16-bit
// encoding, retries once with 16-bit PCM; callers verify via readback.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // NOTE(review): OMX_ErrorNone (== 0) is returned here as a status_t,
        // so an unmappable channel count silently "succeeds" without applying
        // the PCM parameters -- presumably intentional best-effort; confirm.
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err != OK && encoding != kAudioEncodingPcm16bit) {
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Enables tunneled video playback: asks the component for a sideband handle
// synchronized to |audioHwSync| and attaches it to the native window.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Walks the video port-format list looking for the requested compression
// format / color format pair, substituting a codec-native color format when
// the caller asked for a flexible YUV format, and selects the matching entry
// on the port.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && IsFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat =
format.eColorFormat; 3006 } 3007 3008 // The following assertion is violated by TI's video decoder. 3009 // CHECK_EQ(format.nIndex, index); 3010 3011 if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) { 3012 if (portIndex == kPortIndexInput 3013 && colorFormat == format.eColorFormat) { 3014 // eCompressionFormat does not seem right. 3015 found = true; 3016 break; 3017 } 3018 if (portIndex == kPortIndexOutput 3019 && compressionFormat == format.eCompressionFormat) { 3020 // eColorFormat does not seem right. 3021 found = true; 3022 break; 3023 } 3024 } 3025 3026 if (format.eCompressionFormat == compressionFormat 3027 && format.eColorFormat == colorFormat) { 3028 found = true; 3029 break; 3030 } 3031 3032 if (index == kMaxIndicesToCheck) { 3033 ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)", 3034 mComponentName.c_str(), index, 3035 asString(format.eCompressionFormat), format.eCompressionFormat, 3036 asString(format.eColorFormat), format.eColorFormat); 3037 } 3038 } 3039 3040 if (!found) { 3041 return UNKNOWN_ERROR; 3042 } 3043 3044 status_t err = mOMX->setParameter( 3045 mNode, OMX_IndexParamVideoPortFormat, 3046 &format, sizeof(format)); 3047 3048 return err; 3049} 3050 3051// Set optimal output format. OMX component lists output formats in the order 3052// of preference, but this got more complicated since the introduction of flexible 3053// YUV formats. We support a legacy behavior for applications that do not use 3054// surface output, do not specify an output format, but expect a "usable" standard 3055// OMX format. SW readable and standard formats must be flex-YUV. 
//
// Suggested preference order:
// - optimal format for texture rendering (mediaplayer behavior)
// - optimal SW readable & texture renderable format (flex-YUV support)
// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
// - legacy "usable" standard formats
//
// For legacy support, we prefer a standard format, but will settle for a SW readable
// flex-YUV format.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    // Walk the component's preference-ordered list until we either accept an
    // entry or run out of formats.
    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            return OMX_ErrorBadParameter;
        }
        // Non-legacy mode: take the component's first (most preferred) format.
        if (!getLegacyFlexibleFormat) {
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && IsFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}

// Table mapping framework video MIME types to OMX coding types; used by the
// two lookup helpers below (one per direction).
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
    { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision },
};

// MIME -> OMX coding type (case-insensitive). On a miss, sets *codingType to
// OMX_VIDEO_CodingUnused and returns ERROR_UNSUPPORTED.
static status_t GetVideoCodingTypeFromMime(
        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
            return OK;
        }
    }

    *codingType = OMX_VIDEO_CodingUnused;

    return ERROR_UNSUPPORTED;
}

// OMX coding type -> MIME. On a miss, clears *mime and returns
// ERROR_UNSUPPORTED.
static status_t GetMimeTypeForVideoCoding(
        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
            *mime = kVideoCodingMapEntry[i].mMime;
            return OK;
        }
    }

    mime->clear();

    return ERROR_UNSUPPORTED;
}

// Requests that the component use |bufferNum| buffers on |portIndex| by
// rewriting nBufferCountActual in the port definition.
status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;
    status_t err;
    ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }
    def.nBufferCountActual = bufferNum;
    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        // Component could reject this request.
        // Rejection is non-fatal by design: we log and still return OK below.
        ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(),
                portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    }
    return OK;
}

// Full decoder port setup from the configure() message: picks the input
// compression format from |mime|, chooses/validates the output color format
// (falling back to setSupportedOutputFormat), applies optional buffer-count
// overrides, sets frame dimensions/rate on both ports, and finally pushes
// color aspects and HDR static info (both optional for the component).
status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
        bool usingSwRenderer, sp<AMessage> &outputFormat) {
    int32_t width, height;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CODINGTYPE compressionFormat;
    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    if (compressionFormat == OMX_VIDEO_CodingVP9) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexInput;
        // Check if VP9 decoder advertises supported profiles.
        // A decoder that cannot answer this query is treated as legacy.
        params.nProfileIndex = 0;
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
                sizeof(params));
        mIsLegacyVP9Decoder = err != OK;
    }

    err = setVideoPortFormatType(
            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        return err;
    }

    int32_t tmp;
    if (msg->findInt32("color-format", &tmp)) {
        OMX_COLOR_FORMATTYPE colorFormat =
            static_cast<OMX_COLOR_FORMATTYPE>(tmp);
        err = setVideoPortFormatType(
                kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
        if (err != OK) {
            // Requested color format unsupported: fall back to the component's
            // preferred (or legacy flexible) output format.
            ALOGW("[%s] does not support color format %d",
                    mComponentName.c_str(), colorFormat);
            err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
        }
    } else {
        err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
    }

    if (err != OK) {
        return err;
    }

    // Set the component input buffer number to be |tmp|. If succeed,
    // component will set input port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-input-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexInput, tmp);
        if (err != OK)
            return err;
    }

    // Set the component output buffer number to be |tmp|. If succeed,
    // component will set output port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-output-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexOutput, tmp);
        if (err != OK)
            return err;
    }

    // Frame rate may be supplied as float or int; -1 means unspecified.
    int32_t frameRateInt;
    float frameRateFloat;
    if (!msg->findFloat("frame-rate", &frameRateFloat)) {
        if (!msg->findInt32("frame-rate", &frameRateInt)) {
            frameRateInt = -1;
        }
        frameRateFloat = (float)frameRateInt;
    }

    err = setVideoFormatOnPort(
            kPortIndexInput, width, height, compressionFormat, frameRateFloat);

    if (err != OK) {
        return err;
    }

    err = setVideoFormatOnPort(
            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);

    if (err != OK) {
        return err;
    }

    err = setColorAspectsForVideoDecoder(
            width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }
    return err;
}

// Resolves the vendor extension index used for color-aspect negotiation;
// leaves mDescribeColorAspectsIndex at 0 when the codec lacks the extension.
status_t ACodec::initDescribeColorAspectsIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
    if (err != OK) {
        mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Sends |params| to the codec via the describeColorAspects extension (no-op
// returning ERROR_UNSUPPORTED when the extension index is 0); when |verify|
// is set, reads the aspects back so the caller can see what the codec kept.
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));

    if (verify && err == OK) {
        err = getCodecColorAspects(params);
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
            "[%s] setting color aspects failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Decoder-side color-aspect setup: derives aspects from the configure format
// (defaulting by video size when rendering to a native window), mirrors them
// into |outputFormat|, then forwards them to the codec.
status_t ACodec::setColorAspectsForVideoDecoder(
        int32_t width, int32_t height, bool usingNativeWindow,
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (usingNativeWindow) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
        // The default aspects will be set back to the output format during the
        // getFormat phase of configure(). Set non-Unspecified values back into the
        // format, in case component does not support this enumeration.
        setColorAspectsIntoFormat(params.sAspects, outputFormat);
    }

    (void)initDescribeColorAspectsIndex();

    // communicate color aspects to codec
    return setCodecColorAspects(params);
}

// Reads the codec's current color aspects through the describeColorAspects
// extension into |params|; ERROR_UNSUPPORTED when the extension is absent.
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    // Only warn on plain aspect queries; dataspace requests and dataspace-change
    // notifications are allowed to be unsupported.
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}

// Copies the encoder's input-port color aspects into |format| — only when the
// codec actually supports the color-aspect extension.
status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    status_t err = getCodecColorAspects(params);
    if (err == OK) {
        // we only set encoder input aspects if codec supports them
        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
    }
    return err;
}

// Determines the dataspace for the aspects in |params|: optionally asks the
// codec first (|tryCodec|), then falls back to the framework mapping from
// color aspects. Always writes *dataSpace.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
            "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}


// Decoder variant that both negotiates aspects with the codec (with readback)
// and, when |dataSpace| is non-NULL, resolves the resulting dataspace.
// The negotiated aspects are always written into |outputFormat|.
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}

// initial video encoder setup for bytebuffer mode
// Encoder-side color-aspect setup for bytebuffer input. Copies the configured
// color info into |outputFormat| (encoders do not report it back), optionally
// resolves a dataspace for the recorder path, then negotiates aspects with the
// codec, retrying once if the codec modified platform-owned fields.
status_t ACodec::setColorAspectsForVideoEncoder(
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    // copy config to output format as this is not exposed via getFormat
    copyColorConfig(configFormat, outputFormat);

    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    getColorAspectsFromFormat(configFormat, params.sAspects);

    (void)initDescribeColorAspectsIndex();

    int32_t usingRecorder;
    if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
        android_dataspace dataSpace = HAL_DATASPACE_BT709;
        int32_t width, height;
        if (configFormat->findInt32("width", &width)
                && configFormat->findInt32("height", &height)) {
            setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
            status_t err = getDataSpace(
                    params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
            if (err != OK) {
                return err;
            }
            setColorAspectsIntoFormat(params.sAspects, outputFormat);
        }
        inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
    }

    // communicate color aspects to codec, but do not allow change of the platform aspects
    ColorAspects origAspects = params.sAspects;
    for (int triesLeft = 2; --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK
                || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
                        params.sAspects, origAspects, true /* usePlatformAspects */)) {
            return err;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }
    return OK;
}

// Pushes HDR static metadata from the configure format to the codec on
// |portIndex|, mirroring it into |outputFormat| when present in the config.
status_t ACodec::setHDRStaticInfoForVideoCodec(
        OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    HDRStaticInfo *info = &params.sInfo;
    if (getHDRStaticInfoFromFormat(configFormat, info)) {
        setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
    }

    (void)initDescribeHDRStaticInfoIndex();

    // communicate HDR static Info to codec
    return setHDRStaticInfo(params);
}

// subsequent initial video encoder setup for surface mode
status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
        android_dataspace *dataSpace /* nonnull */) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    ColorAspects &aspects = params.sAspects;

    // reset default format and store resulting format into both input and output formats
    getColorAspectsFromFormat(mConfigFormat, aspects);
    int32_t width, height;
    if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) {
        setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
    }
    setColorAspectsIntoFormat(aspects, mInputFormat);
    setColorAspectsIntoFormat(aspects, mOutputFormat);

    // communicate color aspects to codec, but do not allow any change
    ColorAspects origAspects = aspects;
    status_t err = OK;
    for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) {
        // NOTE(review): this inner 'status_t err' shadows the outer one, so
        // the outer err remains OK regardless of setCodecColorAspects' result
        // and the tryCodec decision below never sees a failure — confirm
        // whether the shadowing is intended before relying on it.
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) {
            break;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }

    *dataSpace = HAL_DATASPACE_BT709;
    aspects = origAspects; // restore desired color aspects
    status_t res = getDataSpace(
            params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */);
    if (err == OK) {
        err = res;
    }
    mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
    mInputFormat->setBuffer(
            "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));

    // update input format with codec supported color aspects (basically set unsupported
    // aspects to Unspecified)
    if (err == OK) {
        (void)getInputColorAspectsForVideoEncoder(mInputFormat);
    }

    ALOGV("set default color aspects, updated input format to %s, output format to %s",
            mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());

    return err;
}

// Reads HDR static metadata from the codec on |portIndex| into |format|; only
// updates |format| when the codec supports the extension.
status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    status_t err = getHDRStaticInfo(params);
    if (err == OK) {
        // we only set decodec output HDRStaticInfo if codec supports them
        setHDRStaticInfoIntoFormat(params.sInfo, format);
    }
    return err;
}

// Resolves the vendor extension index for HDR static info; leaves
// mDescribeHDRStaticInfoIndex at 0 when the codec lacks the extension.
status_t ACodec::initDescribeHDRStaticInfoIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
    if (err != OK) {
        mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Sends |params| to the codec through the describeHDRStaticInfo extension;
// ERROR_UNSUPPORTED when the extension index is 0.
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    const HDRStaticInfo *info = &params.sInfo;
    ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, "
            "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
            mComponentName.c_str(),
            info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x,
            info->sType1.mG.y,
            info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
            info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
            info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] setting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Reads HDR static metadata from the codec into |params|; ERROR_UNSUPPORTED
// when the describeHDRStaticInfo extension is absent.
status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] getting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Full encoder setup from the configure() message: input-port color format and
// frame geometry, output-port compression format and bitrate, optional intra
// refresh, per-codec parameter setup, then color aspects, HDR static info and
// (AVC/HEVC) temporal layering — the last three are best-effort.
status_t ACodec::setupVideoEncoder(
        const char *mime, const sp<AMessage> &msg,
        sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    int32_t tmp;
    if (!msg->findInt32("color-format", &tmp)) {
        return INVALID_OPERATION;
    }

    OMX_COLOR_FORMATTYPE colorFormat =
        static_cast<OMX_COLOR_FORMATTYPE>(tmp);

    status_t err = setVideoPortFormatType(
            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);

    if (err != OK) {
        ALOGE("[%s] does not support color format %d",
                mComponentName.c_str(), colorFormat);

        return err;
    }

    /* Input port configuration */

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    int32_t width, height, bitrate;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)
            || !msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    int32_t stride;
    if (!msg->findInt32("stride", &stride)) {
        stride = width;
    }

    video_def->nStride = stride;

    int32_t sliceHeight;
    if (!msg->findInt32("slice-height", &sliceHeight)) {
        sliceHeight = height;
    }

    video_def->nSliceHeight = sliceHeight;

    // YUV 4:2:0 input: 1.5 bytes per pixel over stride x sliceHeight.
    def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
        // NOTE(review): mTimePerFrameUs is only updated when frame-rate
        // arrives as an int32, not via the float key above — confirm whether
        // the float path is supposed to leave it untouched.
        mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
    }

    // xFramerate is in OMX Q16 fixed point.
    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
    // this is redundant as it was already set up in setVideoPortFormatType
    // FIXME for now skip this only for flexible YUV formats
    if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
        video_def->eColorFormat = colorFormat;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set input port definition parameters.",
                mComponentName.c_str());

        return err;
    }

    /* Output port configuration */

    OMX_VIDEO_CODINGTYPE compressionFormat;
    err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        ALOGE("[%s] does not support compression format %d",
                mComponentName.c_str(), compressionFormat);

        return err;
    }

    // |def| is reused for the output port below.
    def.nPortIndex = kPortIndexOutput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;
    video_def->xFramerate = 0;
    video_def->nBitrate = bitrate;
    video_def->eCompressionFormat = compressionFormat;
    video_def->eColorFormat = OMX_COLOR_FormatUnused;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set output port definition parameters.",
                mComponentName.c_str());

        return err;
    }

    int32_t intraRefreshPeriod = 0;
    if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod >= 0) {
        err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    // Per-codec encoder parameter setup.
    switch (compressionFormat) {
        case OMX_VIDEO_CodingMPEG4:
            err = setupMPEG4EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingH263:
            err = setupH263EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingAVC:
            err = setupAVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingHEVC:
            err = setupHEVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            err = setupVPXEncoderParameters(msg, outputFormat);
            break;

        default:
            break;
    }

    if (err != OK) {
        return err;
    }

    // Set up color aspects on input, but propagate them to the output format, as they will
    // not be read back from encoder.
    err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
    if (err == ERROR_UNSUPPORTED) {
        ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    switch (compressionFormat) {
        case OMX_VIDEO_CodingAVC:
        case OMX_VIDEO_CodingHEVC:
            err = configureTemporalLayers(msg, true /* inConfigure */, outputFormat);
            if (err != OK) {
                err = OK; // ignore failure
            }
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            // TODO: do we need to support android.generic layering? webrtc layering is
            // already set up in setupVPXEncoderParameters.
            break;

        default:
            break;
    }

    if (err == OK) {
        ALOGI("setupVideoEncoder succeeded");
    }

    return err;
}

// Configures cyclic and/or adaptive intra-macroblock refresh on the output
// port from the "intra-refresh-*" keys; |mode| selects the OMX refresh mode.
status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nCirMBs = mbs;
    }

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nAirMBs = mbs;

        int32_t ref;
        if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
            return INVALID_OPERATION;
        }
        params.nAirRef = ref;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoIntraRefresh,
            &params, sizeof(params));
    return err;
}

// Converts an I-frame interval (seconds) and frame rate into the OMX nPFrames
// value (number of P frames between I frames), accounting for B frames.
static OMX_U32 setPFramesSpacing(
        float iFramesInterval /* seconds */, int32_t frameRate, uint32_t BFramesSpacing = 0) {
    // BFramesSpacing is the number of B frames between I/P frames
    // PFramesSpacing (the value to be returned) is the number of P frames between I frames
    //
    // keyFrameInterval = ((PFramesSpacing + 1) * BFramesSpacing) + PFramesSpacing + 1
    //                                ^^^                            ^^^          ^^^
    //                     number of B frames                 number of P       I frame
    //
    //                  = (PFramesSpacing + 1) * (BFramesSpacing + 1)
    //
    // E.g.
    //      I P I  : I-interval: 8, nPFrames 1, nBFrames 3
    //       BBB BBB

    if (iFramesInterval < 0) { // just 1 key frame
        return 0xFFFFFFFE; // don't use maxint as key-frame-interval calculation will add 1
    } else if (iFramesInterval == 0) { // just key frames
        return 0;
    }

    // round down as key-frame-interval is an upper limit
    uint32_t keyFrameInterval = uint32_t(frameRate * iFramesInterval);
    OMX_U32 ret = keyFrameInterval / (BFramesSpacing + 1);
    return ret > 0 ? ret - 1 : 0;
}

// Returns the requested OMX rate-control mode from the "bitrate-mode" key,
// defaulting to variable bitrate when the key is absent.
static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
    int32_t tmp;
    if (!msg->findInt32("bitrate-mode", &tmp)) {
        return OMX_Video_ControlRateVariable;
    }

    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
}

// MPEG-4 encoder setup: fills OMX_VIDEO_PARAM_MPEG4TYPE (I/P picture types
// only, nPFrames from the i-frame interval, optional profile/level), then
// configures bitrate and error-correction parameters.
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nBFrames = 0;
    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, mpeg4type.nBFrames);
    // All-I-frame stream: restrict the allowed picture types accordingly.
    if (mpeg4type.nPFrames == 0) {
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        // A profile without a level is rejected.
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// H.263 encoder setup: mirrors the MPEG-4 flow using
// OMX_VIDEO_PARAM_H263TYPE, then configures bitrate and error correction.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nBFrames = 0;
    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h263type.nBFrames);
    // All-I-frame stream: restrict the allowed picture types accordingly.
    if (h263type.nPFrames == 0) {
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        // A profile without a level is rejected.
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// static
// Estimates the minimum AVC level for the given picture size, frame rate and
// bitrate by normalizing to baseline/main-profile kbps and macroblock counts,
// then consulting the per-level limits table below.
int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
        int width, int height, int rate, int bitrate,
        OMX_VIDEO_AVCPROFILETYPE profile) {
    // convert bitrate to main/baseline profile kbps equivalent
    switch (profile) {
        case OMX_VIDEO_AVCProfileHigh10:
            bitrate = divUp(bitrate, 3000); break;
        case OMX_VIDEO_AVCProfileHigh:
            bitrate = divUp(bitrate, 1250); break;
        default:
            bitrate = divUp(bitrate, 1000); break;
    }

    // convert size and rate to MBs
    width = divUp(width, 16);
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;
    int maxDimension = max(width, height);

    static const int limits[][5] = {
        /*
MBps MB dim bitrate level */ 4099 { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 }, 4100 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b }, 4101 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 }, 4102 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 }, 4103 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 }, 4104 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 }, 4105 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 }, 4106 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 }, 4107 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 }, 4108 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 4109 { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 4110 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 4111 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 4112 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 4113 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 4114 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 4115 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 4116 }; 4117 4118 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 4119 const int (&limit)[5] = limits[i]; 4120 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 4121 && bitrate <= limit[3]) { 4122 return limit[4]; 4123 } 4124 } 4125 return 0; 4126} 4127 4128status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 4129 int32_t bitrate; 4130 float iFrameInterval; 4131 if (!msg->findInt32("bitrate", &bitrate) 4132 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) { 4133 return INVALID_OPERATION; 4134 } 4135 4136 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4137 4138 float frameRate; 4139 if (!msg->findFloat("frame-rate", &frameRate)) { 4140 int32_t tmp; 4141 if (!msg->findInt32("frame-rate", &tmp)) { 4142 return INVALID_OPERATION; 4143 } 4144 frameRate = (float)tmp; 4145 } 4146 4147 status_t err = OK; 4148 int32_t intraRefreshMode = 0; 4149 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 4150 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 4151 
if (err != OK) { 4152 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4153 err, intraRefreshMode); 4154 return err; 4155 } 4156 } 4157 4158 OMX_VIDEO_PARAM_AVCTYPE h264type; 4159 InitOMXParams(&h264type); 4160 h264type.nPortIndex = kPortIndexOutput; 4161 4162 err = mOMX->getParameter( 4163 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4164 4165 if (err != OK) { 4166 return err; 4167 } 4168 4169 h264type.nAllowedPictureTypes = 4170 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4171 4172 int32_t profile; 4173 if (msg->findInt32("profile", &profile)) { 4174 int32_t level; 4175 if (!msg->findInt32("level", &level)) { 4176 return INVALID_OPERATION; 4177 } 4178 4179 err = verifySupportForProfileAndLevel(profile, level); 4180 4181 if (err != OK) { 4182 return err; 4183 } 4184 4185 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4186 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4187 } else { 4188 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4189#if 0 /* DON'T YET DEFAULT TO HIGHEST PROFILE */ 4190 // Use largest supported profile for AVC recording if profile is not specified. 
4191 for (OMX_VIDEO_AVCPROFILETYPE profile : { 4192 OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCProfileMain }) { 4193 if (verifySupportForProfileAndLevel(profile, 0) == OK) { 4194 h264type.eProfile = profile; 4195 break; 4196 } 4197 } 4198#endif 4199 } 4200 4201 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4202 asString(h264type.eProfile), asString(h264type.eLevel)); 4203 4204 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4205 h264type.nSliceHeaderSpacing = 0; 4206 h264type.bUseHadamard = OMX_TRUE; 4207 h264type.nRefFrames = 1; 4208 h264type.nBFrames = 0; 4209 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4210 if (h264type.nPFrames == 0) { 4211 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4212 } 4213 h264type.nRefIdx10ActiveMinus1 = 0; 4214 h264type.nRefIdx11ActiveMinus1 = 0; 4215 h264type.bEntropyCodingCABAC = OMX_FALSE; 4216 h264type.bWeightedPPrediction = OMX_FALSE; 4217 h264type.bconstIpred = OMX_FALSE; 4218 h264type.bDirect8x8Inference = OMX_FALSE; 4219 h264type.bDirectSpatialTemporal = OMX_FALSE; 4220 h264type.nCabacInitIdc = 0; 4221 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4222 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4223 h264type.nSliceHeaderSpacing = 0; 4224 h264type.bUseHadamard = OMX_TRUE; 4225 h264type.nRefFrames = 2; 4226 h264type.nBFrames = 1; 4227 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4228 h264type.nAllowedPictureTypes = 4229 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4230 h264type.nRefIdx10ActiveMinus1 = 0; 4231 h264type.nRefIdx11ActiveMinus1 = 0; 4232 h264type.bEntropyCodingCABAC = OMX_TRUE; 4233 h264type.bWeightedPPrediction = OMX_TRUE; 4234 h264type.bconstIpred = OMX_TRUE; 4235 h264type.bDirect8x8Inference = OMX_TRUE; 4236 h264type.bDirectSpatialTemporal = OMX_TRUE; 4237 h264type.nCabacInitIdc = 1; 4238 } 4239 4240 if (h264type.nBFrames != 0) { 4241 
h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4242 } 4243 4244 h264type.bEnableUEP = OMX_FALSE; 4245 h264type.bEnableFMO = OMX_FALSE; 4246 h264type.bEnableASO = OMX_FALSE; 4247 h264type.bEnableRS = OMX_FALSE; 4248 h264type.bFrameMBsOnly = OMX_TRUE; 4249 h264type.bMBAFF = OMX_FALSE; 4250 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4251 4252 err = mOMX->setParameter( 4253 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4254 4255 if (err != OK) { 4256 return err; 4257 } 4258 4259 // TRICKY: if we are enabling temporal layering as well, some codecs may not support layering 4260 // when B-frames are enabled. Detect this now so we can disable B frames if temporal layering 4261 // is preferred. 4262 AString tsSchema; 4263 int32_t preferBFrames = (int32_t)false; 4264 if (msg->findString("ts-schema", &tsSchema) 4265 && (!msg->findInt32("android._prefer-b-frames", &preferBFrames) || !preferBFrames)) { 4266 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layering; 4267 InitOMXParams(&layering); 4268 layering.nPortIndex = kPortIndexOutput; 4269 if (mOMX->getParameter( 4270 mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 4271 &layering, sizeof(layering)) == OK 4272 && layering.eSupportedPatterns 4273 && layering.nBLayerCountMax == 0) { 4274 h264type.nBFrames = 0; 4275 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4276 h264type.nAllowedPictureTypes &= ~OMX_VIDEO_PictureTypeB; 4277 ALOGI("disabling B-frames"); 4278 err = mOMX->setParameter( 4279 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4280 4281 if (err != OK) { 4282 return err; 4283 } 4284 } 4285 } 4286 4287 return configureBitrate(bitrate, bitrateMode); 4288} 4289 4290status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4291 int32_t bitrate; 4292 float iFrameInterval; 4293 if (!msg->findInt32("bitrate", &bitrate) 4294 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) { 4295 return 
INVALID_OPERATION; 4296 } 4297 4298 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4299 4300 float frameRate; 4301 if (!msg->findFloat("frame-rate", &frameRate)) { 4302 int32_t tmp; 4303 if (!msg->findInt32("frame-rate", &tmp)) { 4304 return INVALID_OPERATION; 4305 } 4306 frameRate = (float)tmp; 4307 } 4308 4309 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4310 InitOMXParams(&hevcType); 4311 hevcType.nPortIndex = kPortIndexOutput; 4312 4313 status_t err = OK; 4314 err = mOMX->getParameter( 4315 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4316 if (err != OK) { 4317 return err; 4318 } 4319 4320 int32_t profile; 4321 if (msg->findInt32("profile", &profile)) { 4322 int32_t level; 4323 if (!msg->findInt32("level", &level)) { 4324 return INVALID_OPERATION; 4325 } 4326 4327 err = verifySupportForProfileAndLevel(profile, level); 4328 if (err != OK) { 4329 return err; 4330 } 4331 4332 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4333 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4334 } 4335 // TODO: finer control? 
4336 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1; 4337 4338 err = mOMX->setParameter( 4339 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4340 if (err != OK) { 4341 return err; 4342 } 4343 4344 return configureBitrate(bitrate, bitrateMode); 4345} 4346 4347status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg, sp<AMessage> &outputFormat) { 4348 int32_t bitrate; 4349 float iFrameInterval = 0; 4350 size_t tsLayers = 0; 4351 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4352 OMX_VIDEO_VPXTemporalLayerPatternNone; 4353 static const uint32_t kVp8LayerRateAlloction 4354 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4355 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4356 {100, 100, 100}, // 1 layer 4357 { 60, 100, 100}, // 2 layers {60%, 40%} 4358 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4359 }; 4360 if (!msg->findInt32("bitrate", &bitrate)) { 4361 return INVALID_OPERATION; 4362 } 4363 msg->findAsFloat("i-frame-interval", &iFrameInterval); 4364 4365 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4366 4367 float frameRate; 4368 if (!msg->findFloat("frame-rate", &frameRate)) { 4369 int32_t tmp; 4370 if (!msg->findInt32("frame-rate", &tmp)) { 4371 return INVALID_OPERATION; 4372 } 4373 frameRate = (float)tmp; 4374 } 4375 4376 AString tsSchema; 4377 OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE tsType = 4378 OMX_VIDEO_AndroidTemporalLayeringPatternNone; 4379 4380 if (msg->findString("ts-schema", &tsSchema)) { 4381 unsigned int numLayers = 0; 4382 unsigned int numBLayers = 0; 4383 int tags; 4384 char dummy; 4385 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1 4386 && numLayers > 0) { 4387 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4388 tsType = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC; 4389 tsLayers = numLayers; 4390 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c", 4391 &numLayers, &dummy, &numBLayers, &dummy)) 4392 
&& (tags == 1 || (tags == 3 && dummy == '+')) 4393 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 4394 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4395 // VPX does not have a concept of B-frames, so just count all layers 4396 tsType = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid; 4397 tsLayers = numLayers + numBLayers; 4398 } else { 4399 ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 4400 } 4401 tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS); 4402 } 4403 4404 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4405 InitOMXParams(&vp8type); 4406 vp8type.nPortIndex = kPortIndexOutput; 4407 status_t err = mOMX->getParameter( 4408 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4409 &vp8type, sizeof(vp8type)); 4410 4411 if (err == OK) { 4412 if (iFrameInterval > 0) { 4413 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1; 4414 } 4415 vp8type.eTemporalPattern = pattern; 4416 vp8type.nTemporalLayerCount = tsLayers; 4417 if (tsLayers > 0) { 4418 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4419 vp8type.nTemporalLayerBitrateRatio[i] = 4420 kVp8LayerRateAlloction[tsLayers - 1][i]; 4421 } 4422 } 4423 if (bitrateMode == OMX_Video_ControlRateConstant) { 4424 vp8type.nMinQuantizer = 2; 4425 vp8type.nMaxQuantizer = 63; 4426 } 4427 4428 err = mOMX->setParameter( 4429 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4430 &vp8type, sizeof(vp8type)); 4431 if (err != OK) { 4432 ALOGW("Extended VP8 parameters set failed: %d", err); 4433 } else if (tsType == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) { 4434 // advertise even single layer WebRTC layering, as it is defined 4435 outputFormat->setString("ts-schema", AStringPrintf("webrtc.vp8.%u-layer", tsLayers)); 4436 } else if (tsLayers > 0) { 4437 // tsType == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid 4438 outputFormat->setString("ts-schema", AStringPrintf("android.generic.%u", tsLayers)); 
4439 } 4440 } 4441 4442 return configureBitrate(bitrate, bitrateMode); 4443} 4444 4445status_t ACodec::verifySupportForProfileAndLevel( 4446 int32_t profile, int32_t level) { 4447 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4448 InitOMXParams(¶ms); 4449 params.nPortIndex = kPortIndexOutput; 4450 4451 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4452 params.nProfileIndex = index; 4453 status_t err = mOMX->getParameter( 4454 mNode, 4455 OMX_IndexParamVideoProfileLevelQuerySupported, 4456 ¶ms, 4457 sizeof(params)); 4458 4459 if (err != OK) { 4460 return err; 4461 } 4462 4463 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4464 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4465 4466 if (profile == supportedProfile && level <= supportedLevel) { 4467 return OK; 4468 } 4469 4470 if (index == kMaxIndicesToCheck) { 4471 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4472 mComponentName.c_str(), index, 4473 params.eProfile, params.eLevel); 4474 } 4475 } 4476 return ERROR_UNSUPPORTED; 4477} 4478 4479status_t ACodec::configureBitrate( 4480 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 4481 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 4482 InitOMXParams(&bitrateType); 4483 bitrateType.nPortIndex = kPortIndexOutput; 4484 4485 status_t err = mOMX->getParameter( 4486 mNode, OMX_IndexParamVideoBitrate, 4487 &bitrateType, sizeof(bitrateType)); 4488 4489 if (err != OK) { 4490 return err; 4491 } 4492 4493 bitrateType.eControlRate = bitrateMode; 4494 bitrateType.nTargetBitrate = bitrate; 4495 4496 return mOMX->setParameter( 4497 mNode, OMX_IndexParamVideoBitrate, 4498 &bitrateType, sizeof(bitrateType)); 4499} 4500 4501status_t ACodec::setupErrorCorrectionParameters() { 4502 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 4503 InitOMXParams(&errorCorrectionType); 4504 errorCorrectionType.nPortIndex = kPortIndexOutput; 4505 4506 status_t err = mOMX->getParameter( 4507 mNode, OMX_IndexParamVideoErrorCorrection, 
4508 &errorCorrectionType, sizeof(errorCorrectionType)); 4509 4510 if (err != OK) { 4511 return OK; // Optional feature. Ignore this failure 4512 } 4513 4514 errorCorrectionType.bEnableHEC = OMX_FALSE; 4515 errorCorrectionType.bEnableResync = OMX_TRUE; 4516 errorCorrectionType.nResynchMarkerSpacing = 256; 4517 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 4518 errorCorrectionType.bEnableRVLC = OMX_FALSE; 4519 4520 return mOMX->setParameter( 4521 mNode, OMX_IndexParamVideoErrorCorrection, 4522 &errorCorrectionType, sizeof(errorCorrectionType)); 4523} 4524 4525status_t ACodec::setVideoFormatOnPort( 4526 OMX_U32 portIndex, 4527 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 4528 float frameRate) { 4529 OMX_PARAM_PORTDEFINITIONTYPE def; 4530 InitOMXParams(&def); 4531 def.nPortIndex = portIndex; 4532 4533 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 4534 4535 status_t err = mOMX->getParameter( 4536 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4537 if (err != OK) { 4538 return err; 4539 } 4540 4541 if (portIndex == kPortIndexInput) { 4542 // XXX Need a (much) better heuristic to compute input buffer sizes. 
        // enforce a minimum input buffer size of 64 KiB
        const size_t X = 64 * 1024;
        if (def.nBufferSize < X) {
            def.nBufferSize = X;
        }
    }

    if (def.eDomain != OMX_PortDomainVideo) {
        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
        return FAILED_TRANSACTION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    if (portIndex == kPortIndexInput) {
        video_def->eCompressionFormat = compressionFormat;
        video_def->eColorFormat = OMX_COLOR_FormatUnused;
        if (frameRate >= 0) {
            // xFramerate is Q16 fixed point
            video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
        }
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    return err;
}

// Enables or disables native (graphic) buffers on the output port depending
// on whether a native window is attached. The disable path ignores the
// result on purpose (best effort).
status_t ACodec::initNativeWindow() {
    if (mNativeWindow != NULL) {
        return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
    }

    mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
    return OK;
}

// Returns how many buffers on |portIndex| are currently held by the OMX
// component.
size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            ++n;
        }
    }

    return n;
}

// Returns how many output buffers are currently queued to the native window.
size_t ACodec::countBuffersOwnedByNativeWindow() const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ++n;
        }
    }

    return n;
}

// Dequeues buffers from the native window until only the minimum undequeued
// count remains there (or dequeueing fails).
void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
    if (mNativeWindow == NULL) {
        return;
    }

    while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
            && dequeueBufferFromNativeWindow() != NULL) {
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}

// True iff every buffer on |portIndex| is owned by us or by the native
// window (i.e. none is held by the component or upstream/downstream).
bool ACodec::allYourBuffersAreBelongToUs(
        OMX_U32 portIndex) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mStatus != BufferInfo::OWNED_BY_US
                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("[%s] Buffer %u on port %u still has status %d",
                    mComponentName.c_str(),
                    info->mBufferID, portIndex, info->mStatus);
            return false;
        }
    }

    return true;
}

// Convenience overload covering both ports.
bool ACodec::allYourBuffersAreBelongToUs() {
    return allYourBuffersAreBelongToUs(kPortIndexInput)
        && allYourBuffersAreBelongToUs(kPortIndexOutput);
}

// Queues |msg| for redelivery once the current state transition completes.
void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}

// Redelivers all deferred messages in FIFO order. The queue is swapped out
// first so that messages deferred during processing are not re-processed in
// this pass.
void ACodec::processDeferredMessages() {
    List<sp<AMessage> > queue = mDeferredQueue;
    mDeferredQueue.clear();

    List<sp<AMessage> >::iterator it = queue.begin();
    while (it != queue.end()) {
        onMessageReceived(*it++);
    }
}

// Queries the port definition for |portIndex| and populates |notify| with a
// MediaCodec-style format (mime, dimensions/crop, color info for video;
// mime, channel count, sample rate for audio). (Continues below.)
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
    const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (def.eDir != (portIndex == kPortIndexOutput ?
            OMX_DirOutput : OMX_DirInput)) {
        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
        return BAD_VALUE;
    }

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // raw port: decoder output or encoder input
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    if (mNativeWindow == NULL) {
                        // without a surface, publish the flexible image layout
                        DescribeColorFormat2Params describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (DescribeColorFormat(mOMX, mNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        // fall back to the full frame if no crop is reported
                        if (mOMX->getConfig(
                                    mNode,
                                    (portIndex == kPortIndexOutput ?
                                            OMX_IndexConfigCommonOutputCrop :
                                            OMX_IndexConfigCommonInputCrop),
                                    &rect, sizeof(rect)) != OK) {
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        // reject crop rectangles that fall outside the frame
                        if (rect.nLeft < 0 ||
                            rect.nTop < 0 ||
                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d, %u, %u) vs. frame (%u, %u)",
                                    rect.nLeft, rect.nTop,
                                    rect.nWidth, rect.nHeight,
                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        // crop is stored as inclusive left/top/right/bottom
                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                        (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
                    } else {
                        (void)getInputColorAspectsForVideoEncoder(notify);
                        if (mConfigFormat->contains("hdr-static-info")) {
                            (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
                        }
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    // VP8/VP9 encoder output: report the effective ts-schema
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        if (vp8type.eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC
                                && vp8type.nTemporalLayerCount > 0
                                && vp8type.nTemporalLayerCount
                                        <= OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) {
                            // advertise as android.generic if we configured for android.generic
                            AString origSchema;
                            if (notify->findString("ts-schema", &origSchema)
                                    && origSchema.startsWith("android.generic")) {
                                notify->setString("ts-schema", AStringPrintf(
                                        "android.generic.%u", vp8type.nTemporalLayerCount));
                            } else {
                                notify->setString("ts-schema", AStringPrintf(
                                        "webrtc.vp8.%u-layer", vp8type.nTemporalLayerCount));
                            }
                        }
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
                        // should be CodingUnused
                        ALOGE("Raw port video compression format is %s(%d)",
                                asString(videoDef->eCompressionFormat),
                                videoDef->eCompressionFormat);
                        return BAD_VALUE;
                    }
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                                videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    uint32_t intraRefreshPeriod = 0;
                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
                            && intraRefreshPeriod > 0) {
                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // only linear, interleaved (or mono) PCM is supported
                    if (params.nChannels <= 0
                            || (params.nChannels != 1 && !params.bInterleaved)
                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
                        ALOGE("unsupported PCM port: %u channels%s, %u-bit",
                                params.nChannels,
                                params.bInterleaved ? " interleaved" : "",
                                params.nBitPerSample);
                        return FAILED_TRANSACTION;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);

                    // map (numerical-data, bits-per-sample) to a platform
                    // AudioEncoding; anything not 8u/32f/16s is rejected
                    AudioEncoding encoding = kAudioEncodingPcm16bit;
                    if (params.eNumData == OMX_NumericalDataUnsigned
                            && params.nBitPerSample == 8u) {
                        encoding = kAudioEncodingPcm8bit;
                    } else if (params.eNumData == OMX_NumericalDataFloat
                            && params.nBitPerSample == 32u) {
                        encoding = kAudioEncodingPcmFloat;
                    } else if (params.nBitPerSample != 16u
                            || params.eNumData != OMX_NumericalDataSigned) {
                        ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ",
                                asString(params.eNumData), params.eNumData,
                                asString(params.ePCMMode), params.ePCMMode);
                        return FAILED_TRANSACTION;
                    }
                    notify->setInt32("pcm-encoding", encoding);

                    if (mChannelMaskPresent) {
                        notify->setInt32("channel-mask", mChannelMask);
                    }
                    break;
                }

                case OMX_AUDIO_CodingAAC:
                {
                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAMR:
                {
                    OMX_AUDIO_PARAM_AMRTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // AMR is always mono; wide-band modes imply 16 kHz
                    notify->setInt32("channel-count", 1);
                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
                        notify->setInt32("sample-rate", 16000);
                    } else {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
                        notify->setInt32("sample-rate", 8000);
                    }
                    break;
                }

                case OMX_AUDIO_CodingFLAC:
                {
                    OMX_AUDIO_PARAM_FLACTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingMP3:
                {
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingVORBIS:
                {
                    OMX_AUDIO_PARAM_VORBISTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidEAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidOPUS:
                {
                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingG711:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    const char *mime = NULL;
                    if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
                    } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
                    } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
                        mime = MEDIA_MIMETYPE_AUDIO_RAW;
                    }
                    notify->setString("mime", mime);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);
                    notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit);
                    break;
                }

                case OMX_AUDIO_CodingGSMFR:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);
                    break;
                }

                default:
                    ALOGE("Unsupported audio coding: %s(%d)\n",
                            asString(audioDef->eEncoding), audioDef->eEncoding);
                    return BAD_TYPE;
            }
            break;
        }

        default:
            ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain);
            return BAD_TYPE;
    }

    return OK;
}

// Merges a dataspace change into the output format. (Function continues past
// this excerpt.)
void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) {
    // aspects are normally communicated in ColorAspects
    int32_t range, standard, transfer;
    convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);

    // if some aspects are unspecified, use dataspace fields
    // NOTE(review): the comment says "unspecified", yet the conditions below
    // test `!= 0` (i.e. the aspect IS specified, assuming 0 == unspecified in
    // the platform encoding) — confirm whether these should be `== 0`.
    if (range != 0) {
        range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
    }
    if (standard != 0) {
        standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
    }
    if (transfer != 0) {
        transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
    }

    mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event
    if (range != 0) {
        mOutputFormat->setInt32("color-range", range);
    }
    if (standard != 0) {
        mOutputFormat->setInt32("color-standard", standard);
    }
    if (transfer != 0) {
        mOutputFormat->setInt32("color-transfer", transfer);
    }
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5133 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5134 dataSpace, 5135 aspects.mRange, asString(aspects.mRange), 5136 aspects.mPrimaries, asString(aspects.mPrimaries), 5137 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5138 aspects.mTransfer, asString(aspects.mTransfer), 5139 range, asString((ColorRange)range), 5140 standard, asString((ColorStandard)standard), 5141 transfer, asString((ColorTransfer)transfer)); 5142} 5143 5144void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5145 // store new output format, at the same time mark that this is no longer the first frame 5146 mOutputFormat = mBaseOutputFormat->dup(); 5147 5148 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5149 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5150 return; 5151 } 5152 5153 if (expectedFormat != NULL) { 5154 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5155 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5156 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5157 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5158 mComponentName.c_str(), 5159 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5160 } 5161 } 5162 5163 if (!mIsVideo && !mIsEncoder) { 5164 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5165 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5166 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5167 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5168 5169 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5170 if (mConverter[kPortIndexOutput] != NULL) { 5171 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5172 } 5173 } 5174 5175 if (mTunneled) { 5176 sendFormatChange(); 5177 } 5178} 5179 5180void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
        // notify renderer of the crop change and dataspace change
        // NOTE: native window uses extended right-bottom coordinate
        int32_t left, top, right, bottom;
        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
            notify->setRect("crop", left, top, right + 1, bottom + 1);
        }

        int32_t dataSpace;
        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
            notify->setInt32("dataspace", dataSpace);
        }
    }
}

// Posts a kWhatOutputFormatChanged notification carrying mOutputFormat.
// For raw-audio output with encoder delay/padding configured, first rescales
// the delay/padding to the new sample rate and (re)creates the SkipCutBuffer
// that trims them from decoded output.
void ACodec::sendFormatChange() {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) {
        int32_t channelCount, sampleRate;
        CHECK(mOutputFormat->findInt32("channel-count", &channelCount));
        CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate));
        if (mSampleRate != 0 && sampleRate != 0) {
            // scale frame counts from the previous sample rate to the new one
            mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate;
            mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate;
            mSampleRate = sampleRate;
        }
        if (mSkipCutBuffer != NULL) {
            size_t prevbufsize = mSkipCutBuffer->size();
            if (prevbufsize != 0) {
                // the old buffer still held data; it is discarded below
                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
            }
        }
        mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatOutputFormatChanged);
    notify->setMessage("format", mOutputFormat);
    notify->post();

    // mLastOutputFormat is not used when tunneled; doing this just to stay consistent
    mLastOutputFormat = mOutputFormat;
}

// Reports a fatal error to the client (MediaCodec). If internalError is
// UNKNOWN_ERROR, tries to derive a more specific status from the OMX error.
// Marks the codec as fatally failed (mFatalError).
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatError);
    ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);

    if (internalError == UNKNOWN_ERROR) { // find better error code
        const status_t omxStatus = statusFromOMXError(error);
        if (omxStatus != 0) {
            internalError = omxStatus;
        } else {
            ALOGW("Invalid OMX error %#x", error);
        }
    }

    mFatalError = true;

    notify->setInt32("err", internalError);
    notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
    notify->post();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::PortDescription::PortDescription() {
}

// Requests an IDR (sync) frame from a video encoder via
// OMX_IndexConfigVideoIntraVOPRefresh; decoders return ERROR_UNSUPPORTED.
status_t ACodec::requestIDRFrame() {
    if (!mIsEncoder) {
        return ERROR_UNSUPPORTED;
    }

    OMX_CONFIG_INTRAREFRESHVOPTYPE params;
    InitOMXParams(&params);

    params.nPortIndex = kPortIndexOutput;
    params.IntraRefreshVOP = OMX_TRUE;

    return mOMX->setConfig(
            mNode,
            OMX_IndexConfigVideoIntraVOPRefresh,
            &params,
            sizeof(params));
}

// Appends one buffer record; the four parallel vectors stay index-aligned.
void ACodec::PortDescription::addBuffer(
        IOMX::buffer_id id, const sp<ABuffer> &buffer,
        const sp<NativeHandle> &handle, const sp<RefBase> &memRef) {
    mBufferIDs.push_back(id);
    mBuffers.push_back(buffer);
    mHandles.push_back(handle);
    mMemRefs.push_back(memRef);
}

size_t ACodec::PortDescription::countBuffers() {
    return mBufferIDs.size();
}

IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
    return mBufferIDs.itemAt(index);
}

sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
    return mBuffers.itemAt(index);
}

sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const {
    return mHandles.itemAt(index);
}

sp<RefBase>
ACodec::PortDescription::memRefAt(size_t index) const {
    return mMemRefs.itemAt(index);
}

////////////////////////////////////////////////////////////////////////////////

ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
    : AState(parentState),
      mCodec(codec) {
}

// Default port mode: hold on to buffers; states that stream override this.
ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
        OMX_U32 /* portIndex */) {
    return KEEP_BUFFERS;
}

// Common message dispatch shared by all codec states; state subclasses
// handle what they can and fall through to this for the rest.
bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatInputBufferFilled:
        {
            onInputBufferFilled(msg);
            break;
        }

        case kWhatOutputBufferDrained:
        {
            onOutputBufferDrained(msg);
            break;
        }

        case ACodec::kWhatOMXMessageList:
        {
            // validate the batch once; individual items skip the check below
            return checkOMXMessage(msg) ? onOMXMessageList(msg) : true;
        }

        case ACodec::kWhatOMXMessageItem:
        {
            // no need to check as we already did it for kWhatOMXMessageList
            return onOMXMessage(msg);
        }

        case ACodec::kWhatOMXMessage:
        {
            return checkOMXMessage(msg) ? onOMXMessage(msg) : true;
        }

        case ACodec::kWhatSetSurface:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            sp<RefBase> obj;
            CHECK(msg->findObject("surface", &obj));

            status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        case ACodec::kWhatSetInputSurface:
        case ACodec::kWhatSignalEndOfInputStream:
        {
            // This may result in an app illegal state exception.
5366 ALOGE("Message 0x%x was not handled", msg->what()); 5367 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5368 return true; 5369 } 5370 5371 case ACodec::kWhatOMXDied: 5372 { 5373 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5374 ALOGE("OMX/mediaserver died, signalling error!"); 5375 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5376 break; 5377 } 5378 5379 case ACodec::kWhatReleaseCodecInstance: 5380 { 5381 ALOGI("[%s] forcing the release of codec", 5382 mCodec->mComponentName.c_str()); 5383 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5384 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5385 mCodec->mComponentName.c_str(), err); 5386 sp<AMessage> notify = mCodec->mNotify->dup(); 5387 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5388 notify->post(); 5389 break; 5390 } 5391 5392 default: 5393 return false; 5394 } 5395 5396 return true; 5397} 5398 5399bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5400 // there is a possibility that this is an outstanding message for a 5401 // codec that we have already destroyed 5402 if (mCodec->mNode == 0) { 5403 ALOGI("ignoring message as already freed component: %s", 5404 msg->debugString().c_str()); 5405 return false; 5406 } 5407 5408 IOMX::node_id nodeID; 5409 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5410 if (nodeID != mCodec->mNode) { 5411 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5412 return false; 5413 } 5414 return true; 5415} 5416 5417bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5418 sp<RefBase> obj; 5419 CHECK(msg->findObject("messages", &obj)); 5420 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5421 5422 bool receivedRenderedEvents = false; 5423 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5424 it != msgList->getList().cend(); ++it) { 5425 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 
        mCodec->handleMessage(*it);
        int32_t type;
        CHECK((*it)->findInt32("type", &type));
        if (type == omx_message::FRAME_RENDERED) {
            receivedRenderedEvents = true;
        }
    }

    if (receivedRenderedEvents) {
        // NOTE: all buffers are rendered in this case
        mCodec->notifyOfRenderedFrames();
    }
    return true;
}

// Decodes one omx_message (EVENT / EBD / FBD / FRAME_RENDERED) from its
// AMessage form and forwards it to the typed handler. Returns false for
// unrecognized message types.
bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));

    switch (type) {
        case omx_message::EVENT:
        {
            int32_t event, data1, data2;
            CHECK(msg->findInt32("event", &event));
            CHECK(msg->findInt32("data1", &data1));
            CHECK(msg->findInt32("data2", &data2));

            if (event == OMX_EventCmdComplete
                    && data1 == OMX_CommandFlush
                    && data2 == (int32_t)OMX_ALL) {
                // Use of this notification is not consistent across
                // implementations. We'll drop this notification and rely
                // on flush-complete notifications on the individual port
                // indices instead.

                return true;
            }

            return onOMXEvent(
                    static_cast<OMX_EVENTTYPE>(event),
                    static_cast<OMX_U32>(data1),
                    static_cast<OMX_U32>(data2));
        }

        case omx_message::EMPTY_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            int32_t fenceFd;

            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXEmptyBufferDone(bufferID, fenceFd);
        }

        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));

            int32_t rangeOffset, rangeLength, flags, fenceFd;
            int64_t timeUs;

            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    fenceFd);
        }

        case omx_message::FRAME_RENDERED:
        {
            int64_t mediaTimeUs, systemNano;

            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
            CHECK(msg->findInt64("system_nano", &systemNano));

            return onOMXFrameRendered(
                    mediaTimeUs, systemNano);
        }

        default:
            ALOGE("Unexpected message type: %d", type);
            return false;
    }
}

bool ACodec::BaseState::onOMXFrameRendered(
        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
    // ignore outside of Executing and PortSettingsChanged states
    return true;
}

// Handles generic OMX events: unpacks dataspace-changed payloads (aspects
// packed into data2 as R|P|M|T bytes, high to low), logs non-error events,
// and converts OMX errors into a client-visible fatal error.
bool ACodec::BaseState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    if (event == OMX_EventDataSpaceChanged) {
        ColorAspects aspects;
        aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
        aspects.mPrimaries =
            (ColorAspects::Primaries)((data2 >> 16) & 0xFF);
        aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF);
        aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF);

        mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
        return true;
    }

    if (event != OMX_EventError) {
        ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
                mCodec->mComponentName.c_str(), event, data1, data2);

        // not handled here; let the current state inspect it
        return false;
    }

    ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);

    // verify OMX component sends back an error we expect.
    OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
    if (!isOMXError(omxError)) {
        ALOGW("Invalid OMX error %#x", omxError);
        omxError = OMX_ErrorUndefined;
    }
    mCodec->signalError(omxError);

    return true;
}

// The component returned an input buffer (EBD). Reclaims ownership, waits out
// the fence (input buffers cannot carry fences upstream), and resubmits the
// buffer to the client if the port is in RESUBMIT mode.
bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
    ALOGV("[%s] onOMXEmptyBufferDone %u",
         mCodec->mComponentName.c_str(), bufferID);

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        if (fenceFd >= 0) {
            // avoid leaking the fd even on the error path
            ::close(fenceFd);
        }
        return false;
    }
    info->mStatus = BufferInfo::OWNED_BY_US;

    // input buffers cannot take fences, so wait for any fence now
    (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
    fenceFd = -1;

    // still save fence for completeness
    info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");

    // We're in "store-metadata-in-buffers" mode, the underlying
    // OMX component had access to data that's implicitly refcounted
    // by this "MediaBuffer" object. Now that the OMX component has
    // told us that it's done with the input buffer, we can decrement
    // the mediaBuffer's reference count.
    info->mData->setMediaBufferBase(NULL);

    PortMode mode = getPortMode(kPortIndexInput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
            postFillThisBuffer(info);
            break;

        case FREE_BUFFERS:
        default:
            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
            return false;
    }

    return true;
}

// Hands an (empty) input buffer to the client to be filled; ownership moves
// to UPSTREAM until the client replies with kWhatInputBufferFilled.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
    notify->setInt32("buffer-id", info->mBufferID);

    info->mData->meta()->clear();
    notify->setBuffer("buffer", info->mData);

    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
    reply->setInt32("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

// The client returned a filled input buffer (or an error/flush return with no
// buffer). Validates ownership, detects EOS, and — depending on port mode —
// forwards the data to the OMX component.
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<ABuffer> buffer;
    int32_t err = OK;
    bool eos = false;
    PortMode mode = getPortMode(kPortIndexInput);

    if (!msg->findBuffer("buffer", &buffer)) {
        /* these are unfilled buffers returned by client */
        CHECK(msg->findInt32("err", &err));

        if (err == OK) {
            /* buffers with no errors are returned on MediaCodec.flush */
            mode = KEEP_BUFFERS;
        } else {
            ALOGV("[%s] saw error %d instead of an input buffer",
                 mCodec->mComponentName.c_str(), err);
            eos = true;
        }

        buffer.clear();
    }

    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // not submitting to the component; just latch EOS state
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                MetadataBufferType metaType = mCodec->mInputMetadataType;
                int32_t isCSD = 0;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    if (mCodec->mIsLegacyVP9Decoder) {
                        ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                        postFillThisBuffer(info);
                        break;
                    }
                    // codec-specific data is sent as a plain buffer even in metadata mode
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                    metaType = kMetadataBufferTypeInvalid;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                if (buffer != info->mCodecData) {
                    // client buffer differs from the codec-side buffer: convert or copy
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                         mCodec->mComponentName.c_str(),
                         bufferID,
                         buffer.get(), info->mCodecData.get());

                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL || isCSD) {
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                         mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                         mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");

                // attach the graphic buffer / native handle for metadata-mode input
                status_t err2 = OK;
                switch (metaType) {
                case kMetadataBufferTypeInvalid:
                    break;
#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                case kMetadataBufferTypeNativeHandleSource:
                    if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
                        VideoNativeHandleMetadata *vnhmd =
                            (VideoNativeHandleMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateNativeHandleInMeta(
                                mCodec->mNode, kPortIndexInput,
                                NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */),
                                bufferID);
                    }
                    break;
                case kMetadataBufferTypeANWBuffer:
                    if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
                        VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateGraphicBufferInMeta(
                                mCodec->mNode, kPortIndexInput,
                                new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */),
                                bufferID);
                    }
                    break;
#endif
                default:
                    ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode",
                            asString(metaType), info->mCodecData->size(),
                            sizeof(buffer_handle_t) * 8);
                    err2 = ERROR_UNSUPPORTED;
                    break;
                }

                if (err2 == OK) {
                    err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        info->mCodecData->size(),
                        flags,
                        timeUs,
                        info->mFenceFd);
                }
                // fence fd ownership was passed to emptyBuffer (or is void on error)
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                         mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // no buffer came back (error path): push a zero-length EOS buffer
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                         mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                         mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                     mCodec->mComponentName.c_str(), bufferID);

                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        0,
                        OMX_BUFFERFLAG_EOS,
                        0,
                        info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// Re-offers an input buffer we still own to the client for filling; picks the
// last OWNED_BY_US buffer found (if any).
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

// The component produced an output buffer (FBD). Reclaims ownership, tracks
// render state and fences, and — per port mode — either resubmits an empty
// buffer, forwards the data downstream (converting if needed), or frees it.
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
         mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;
    status_t err= OK;

#if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        if (fenceFd >= 0) {
            // avoid leaking the fd even on the error path
            ::close(fenceFd);
        }
        return true;
    }

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus = BufferInfo::OWNED_BY_US;

    if (info->mRenderInfo != NULL) {
        // The fence for an emptied buffer must have signaled, but there still could be queued
        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
        // as we will soon requeue this buffer to the surface. While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // empty, non-final buffers go straight back to the component
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                     mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) {
                // If potentially rendering onto a surface, always save key format data (crop &
                // data space) so that we can set it if and once the buffer is rendered.
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                native_handle_t *handle = NULL;
                VideoNativeHandleMetadata &nativeMeta =
                    *(VideoNativeHandleMetadata *)info->mData->data();
                if (info->mData->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // handle is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pHandle;
#endif
                }
                info->mData->meta()->setPointer("handle", handle);
                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                info->mData->meta()->setInt32("rangeLength", rangeLength);
            } else if (info->mData == info->mCodecData) {
                info->mData->setRange(rangeOffset, rangeLength);
            } else {
                info->mCodecData->setRange(rangeOffset, rangeLength);
                // in this case we know that mConverter is not null
                status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                        info->mCodecData, info->mData);
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                // trim encoder delay/padding from decoded audio
                mCodec->mSkipCutBuffer->submit(info->mData);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setBuffer("buffer", info->mData);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;
// Handles the client returning an output buffer (kWhatOutputBufferDrained).
// Applies any pending crop/dataspace carried on the reply message, optionally
// queues the buffer to the native window for rendering (or drops it), and then
// resubmits/keeps/frees the buffer according to the current port mode.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    // Only a buffer we previously handed downstream may be drained back to us.
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    // Crop/dataspace were attached to the reply when the output format changed;
    // apply them only now, right before this buffer can be rendered.
    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
            && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
        mCodec->mLastNativeWindowCrop = crop;
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    int32_t dataSpace;
    if (msg->findInt32("dataspace", &dataSpace)
            && dataSpace != mCodec->mLastNativeWindowDataSpace) {
        status_t err = native_window_set_buffers_data_space(
                mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
        // NOTE(review): mLastNativeWindowDataSpace is recorded even if the call
        // above failed, so a failed set is not retried on the next buffer --
        // presumably intentional (best effort); confirm before changing.
        mCodec->mLastNativeWindowDataSpace = dataSpace;
        ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
    }

    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0
            && info->mData != NULL && info->mData->size() != 0) {
        ATRACE_NAME("render");
        // The client wants this buffer to be rendered.

        // save buffers sent to the surface so we can get render time when they return
        int64_t mediaTimeUs = -1;
        info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
        if (mediaTimeUs >= 0) {
            mCodec->mRenderTracker.onFrameQueued(
                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
        }

        int64_t timestampNs = 0;
        if (!msg->findInt64("timestampNs", &timestampNs)) {
            // use media timestamp if client did not request a specific render timestamp
            if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
                ALOGV("using buffer PTS of %lld", (long long)timestampNs);
                timestampNs *= 1000;  // us -> ns
            }
        }

        status_t err;
        err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
        ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);

        // Ownership of the fence fd transfers to the window on queueBuffer.
        info->checkReadFence("onOutputBufferDrained before queueBuffer");
        err = mCodec->mNativeWindow->queueBuffer(
                    mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
        info->mFenceFd = -1;
        if (err == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            info->mStatus = BufferInfo::OWNED_BY_US;
            // keeping read fence as write fence to avoid clobbering
            info->mIsReadFence = false;
        }
    } else {
        // Buffer is dropped (not rendered); it stays with us.
        if (mCodec->mNativeWindow != NULL &&
            (info->mData == NULL || info->mData->size() != 0)) {
            // move read fence into write fence to avoid clobbering
            info->mIsReadFence = false;
            ATRACE_NAME("frame-drop");
        }
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                // info may be NULL if the dequeue above failed.
                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                         mCodec->mComponentName.c_str(), info->mBufferID);
                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                    status_t err = mCodec->mOMX->fillBuffer(
                            mCodec->mNode, info->mBufferID, info->mFenceFd);
                    info->mFenceFd = -1;
                    if (err == OK) {
                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                    } else {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                }
            }
            break;
        }

        case FREE_BUFFERS:
        {
            status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            }
            break;
        }

        default:
            ALOGE("Invalid port mode: %d", mode);
            return;
    }
}
// Entering Uninitialized: drop the OMX-death watcher and reset all
// per-component state so a subsequent allocate starts from a clean slate.
void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    // Stop watching the (now irrelevant) node binder for death.
    if (mDeathNotifier != NULL) {
        mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier);
        mDeathNotifier.clear();
    }

    // Clear everything tied to the previous component instance.
    mCodec->mUsingNativeWindow = false;
    mCodec->mNativeWindow.clear();
    mCodec->mNativeWindowUsageBits = 0;
    mCodec->mNode = 0;
    mCodec->mOMX.clear();
    mCodec->mQuirks = 0;
    mCodec->mFlags = 0;
    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mConverter[0].clear();
    mCodec->mConverter[1].clear();
    mCodec->mComponentName.clear();
}
uninitialized"); 6231 6232 if (mDeathNotifier != NULL) { 6233 mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier); 6234 mDeathNotifier.clear(); 6235 } 6236 6237 mCodec->mUsingNativeWindow = false; 6238 mCodec->mNativeWindow.clear(); 6239 mCodec->mNativeWindowUsageBits = 0; 6240 mCodec->mNode = 0; 6241 mCodec->mOMX.clear(); 6242 mCodec->mQuirks = 0; 6243 mCodec->mFlags = 0; 6244 mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; 6245 mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; 6246 mCodec->mConverter[0].clear(); 6247 mCodec->mConverter[1].clear(); 6248 mCodec->mComponentName.clear(); 6249} 6250 6251bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 6252 bool handled = false; 6253 6254 switch (msg->what()) { 6255 case ACodec::kWhatSetup: 6256 { 6257 onSetup(msg); 6258 6259 handled = true; 6260 break; 6261 } 6262 6263 case ACodec::kWhatAllocateComponent: 6264 { 6265 onAllocateComponent(msg); 6266 handled = true; 6267 break; 6268 } 6269 6270 case ACodec::kWhatShutdown: 6271 { 6272 int32_t keepComponentAllocated; 6273 CHECK(msg->findInt32( 6274 "keepComponentAllocated", &keepComponentAllocated)); 6275 ALOGW_IF(keepComponentAllocated, 6276 "cannot keep component allocated on shutdown in Uninitialized state"); 6277 6278 sp<AMessage> notify = mCodec->mNotify->dup(); 6279 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6280 notify->post(); 6281 6282 handled = true; 6283 break; 6284 } 6285 6286 case ACodec::kWhatFlush: 6287 { 6288 sp<AMessage> notify = mCodec->mNotify->dup(); 6289 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6290 notify->post(); 6291 6292 handled = true; 6293 break; 6294 } 6295 6296 case ACodec::kWhatReleaseCodecInstance: 6297 { 6298 // nothing to do, as we have already signaled shutdown 6299 handled = true; 6300 break; 6301 } 6302 6303 default: 6304 return BaseState::onMessageReceived(msg); 6305 } 6306 6307 return handled; 6308} 6309 6310void ACodec::UninitializedState::onSetup( 6311 
// Allocates an OMX node for the requested component (by explicit name or by
// mime/encoder match), wires up death notification and the observer callback
// channel, records component state on mCodec, and transitions to Loaded.
// Returns false (after signalling an error) if no component could be allocated.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mNode == 0);

    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    sp<IOMX> omx = client.interface();

    // Posted by the death notifier if the mediaserver-side node dies.
    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

    Vector<AString> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        // Exact component requested: use it only if the codec list knows it.
        sp<IMediaCodecList> list = MediaCodecList::getInstance();
        if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
            matchingCodecs.add(componentName);
        }
    } else {
        // Otherwise match candidates by mime type and encoder/decoder role.
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        MediaCodecList::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                0,       // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    // Try candidates in order until one allocates successfully.
    status_t err = NAME_NOT_FOUND;
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs[matchIndex];
        quirks = MediaCodecList::getQuirksFor(componentName.c_str());

        // Temporarily boost this thread so allocation isn't starved;
        // restore the previous priority afterwards.
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        node = 0;
    }

    if (node == 0) {
        // err holds the failure from the last candidate tried (or NAME_NOT_FOUND).
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                    encoder ? "en" : "de", mime.c_str(), err);
        } else {
            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
        }

        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
        return false;
    }

    mDeathNotifier = new DeathNotifier(notify);
    if (mCodec->mNodeBinder == NULL ||
            mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    // Route OMX callbacks for this node back to us as message lists.
    notify = new AMessage(kWhatOMXMessageList, mCodec);
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mRenderTracker.setComponentName(componentName);
    mCodec->mFlags = 0;

    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    mCodec->mQuirks = quirks;
    mCodec->mOMX = omx;
    mCodec->mNode = node;

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}
mCodec->mPortEOS[kPortIndexOutput] = false; 6443 6444 mCodec->mInputEOSResult = OK; 6445 6446 mCodec->mDequeueCounter = 0; 6447 mCodec->mMetadataBuffersToSubmit = 0; 6448 mCodec->mRepeatFrameDelayUs = -1ll; 6449 mCodec->mInputFormat.clear(); 6450 mCodec->mOutputFormat.clear(); 6451 mCodec->mBaseOutputFormat.clear(); 6452 6453 if (mCodec->mShutdownInProgress) { 6454 bool keepComponentAllocated = mCodec->mKeepComponentAllocated; 6455 6456 mCodec->mShutdownInProgress = false; 6457 mCodec->mKeepComponentAllocated = false; 6458 6459 onShutdown(keepComponentAllocated); 6460 } 6461 mCodec->mExplicitShutdown = false; 6462 6463 mCodec->processDeferredMessages(); 6464} 6465 6466void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { 6467 if (!keepComponentAllocated) { 6468 (void)mCodec->mOMX->freeNode(mCodec->mNode); 6469 6470 mCodec->changeState(mCodec->mUninitializedState); 6471 } 6472 6473 if (mCodec->mExplicitShutdown) { 6474 sp<AMessage> notify = mCodec->mNotify->dup(); 6475 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6476 notify->post(); 6477 mCodec->mExplicitShutdown = false; 6478 } 6479} 6480 6481bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { 6482 bool handled = false; 6483 6484 switch (msg->what()) { 6485 case ACodec::kWhatConfigureComponent: 6486 { 6487 onConfigureComponent(msg); 6488 handled = true; 6489 break; 6490 } 6491 6492 case ACodec::kWhatCreateInputSurface: 6493 { 6494 onCreateInputSurface(msg); 6495 handled = true; 6496 break; 6497 } 6498 6499 case ACodec::kWhatSetInputSurface: 6500 { 6501 onSetInputSurface(msg); 6502 handled = true; 6503 break; 6504 } 6505 6506 case ACodec::kWhatStart: 6507 { 6508 onStart(); 6509 handled = true; 6510 break; 6511 } 6512 6513 case ACodec::kWhatShutdown: 6514 { 6515 int32_t keepComponentAllocated; 6516 CHECK(msg->findInt32( 6517 "keepComponentAllocated", &keepComponentAllocated)); 6518 6519 mCodec->mExplicitShutdown = true; 6520 onShutdown(keepComponentAllocated); 6521 
6522 handled = true; 6523 break; 6524 } 6525 6526 case ACodec::kWhatFlush: 6527 { 6528 sp<AMessage> notify = mCodec->mNotify->dup(); 6529 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6530 notify->post(); 6531 6532 handled = true; 6533 break; 6534 } 6535 6536 default: 6537 return BaseState::onMessageReceived(msg); 6538 } 6539 6540 return handled; 6541} 6542 6543bool ACodec::LoadedState::onConfigureComponent( 6544 const sp<AMessage> &msg) { 6545 ALOGV("onConfigureComponent"); 6546 6547 CHECK(mCodec->mNode != 0); 6548 6549 status_t err = OK; 6550 AString mime; 6551 if (!msg->findString("mime", &mime)) { 6552 err = BAD_VALUE; 6553 } else { 6554 err = mCodec->configureCodec(mime.c_str(), msg); 6555 } 6556 if (err != OK) { 6557 ALOGE("[%s] configureCodec returning error %d", 6558 mCodec->mComponentName.c_str(), err); 6559 6560 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6561 return false; 6562 } 6563 6564 { 6565 sp<AMessage> notify = mCodec->mNotify->dup(); 6566 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 6567 notify->setMessage("input-format", mCodec->mInputFormat); 6568 notify->setMessage("output-format", mCodec->mOutputFormat); 6569 notify->post(); 6570 } 6571 6572 return true; 6573} 6574 6575status_t ACodec::LoadedState::setupInputSurface() { 6576 status_t err = OK; 6577 6578 if (mCodec->mRepeatFrameDelayUs > 0ll) { 6579 err = mCodec->mOMX->setInternalOption( 6580 mCodec->mNode, 6581 kPortIndexInput, 6582 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 6583 &mCodec->mRepeatFrameDelayUs, 6584 sizeof(mCodec->mRepeatFrameDelayUs)); 6585 6586 if (err != OK) { 6587 ALOGE("[%s] Unable to configure option to repeat previous " 6588 "frames (err %d)", 6589 mCodec->mComponentName.c_str(), 6590 err); 6591 return err; 6592 } 6593 } 6594 6595 if (mCodec->mMaxPtsGapUs > 0ll) { 6596 err = mCodec->mOMX->setInternalOption( 6597 mCodec->mNode, 6598 kPortIndexInput, 6599 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 6600 
// Applies all encoder-input-surface options that were captured at configure
// time (repeat-previous-frame delay, max PTS gap, max fps, time-lapse,
// create-suspended, color aspects) to the input port via internal options.
// Each option is only set when its configured value indicates it was
// requested; the first failure aborts and is returned to the caller.
status_t ACodec::LoadedState::setupInputSurface() {
    status_t err = OK;

    if (mCodec->mRepeatFrameDelayUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
                &mCodec->mRepeatFrameDelayUs,
                sizeof(mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxPtsGapUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
                &mCodec->mMaxPtsGapUs,
                sizeof(mCodec->mMaxPtsGapUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_FPS,
                &mCodec->mMaxFps,
                sizeof(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    // Time lapse needs both per-frame and per-capture durations.
    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        int64_t timeLapse[2];
        timeLapse[0] = mCodec->mTimePerFrameUs;
        timeLapse[1] = mCodec->mTimePerCaptureUs;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_TIME_LAPSE,
                &timeLapse[0],
                sizeof(timeLapse));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        bool suspend = true;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_SUSPEND,
                &suspend,
                sizeof(suspend));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    // Best effort: surface consumer usage bits, if the component reports them.
    uint32_t usageBits;
    if (mCodec->mOMX->getParameter(
            mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    // Forward color aspects (stashed in the input format at configure time).
    sp<ABuffer> colorAspectsBuffer;
    if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
                colorAspectsBuffer->base(), colorAspectsBuffer->capacity());
        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}
// Attaches a client-provided persistent input surface to the encoder.
// Mirrors onCreateInputSurface, but consumes an existing PersistentSurface
// from the message instead of creating a new producer. Always posts
// kWhatInputSurfaceAccepted (with "err" set on failure).
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());

    // Determine the dataspace from the configured color aspects; the formats
    // are reported back to the client regardless of success.
    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        err = mCodec->mOMX->setInputSurface(
                mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
                &mCodec->mInputMetadataType);
        // framework uses ANW buffers internally instead of gralloc handles
        if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        }
    }

    if (err == OK) {
        surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
        err = setupInputSurface();
    }

    if (err != OK) {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}
// Entering Loaded->Idle: allocate buffers on both ports as OMX requires for
// the Idle transition. On failure, signal the error, roll the component back
// toward Loaded, free whatever buffers we still fully own, and re-enter the
// Loaded state.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
             "(error 0x%08x)",
             err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        // Best-effort rollback; result of the state command is ignored here.
        mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        // Only free ports whose buffers are all back in our hands.
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}
case kWhatSignalEndOfInputStream: 6834 { 6835 mCodec->onSignalEndOfInputStream(); 6836 return true; 6837 } 6838 6839 case kWhatResume: 6840 { 6841 // We'll be active soon enough. 6842 return true; 6843 } 6844 6845 case kWhatFlush: 6846 { 6847 // We haven't even started yet, so we're flushed alright... 6848 sp<AMessage> notify = mCodec->mNotify->dup(); 6849 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6850 notify->post(); 6851 return true; 6852 } 6853 6854 default: 6855 return BaseState::onMessageReceived(msg); 6856 } 6857} 6858 6859bool ACodec::LoadedToIdleState::onOMXEvent( 6860 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6861 switch (event) { 6862 case OMX_EventCmdComplete: 6863 { 6864 status_t err = OK; 6865 if (data1 != (OMX_U32)OMX_CommandStateSet 6866 || data2 != (OMX_U32)OMX_StateIdle) { 6867 ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)", 6868 asString((OMX_COMMANDTYPE)data1), data1, 6869 asString((OMX_STATETYPE)data2), data2); 6870 err = FAILED_TRANSACTION; 6871 } 6872 6873 if (err == OK) { 6874 err = mCodec->mOMX->sendCommand( 6875 mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting); 6876 } 6877 6878 if (err != OK) { 6879 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6880 } else { 6881 mCodec->changeState(mCodec->mIdleToExecutingState); 6882 } 6883 6884 return true; 6885 } 6886 6887 default: 6888 return BaseState::onOMXEvent(event, data1, data2); 6889 } 6890} 6891 6892//////////////////////////////////////////////////////////////////////////////// 6893 6894ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec) 6895 : BaseState(codec) { 6896} 6897 6898void ACodec::IdleToExecutingState::stateEntered() { 6899 ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str()); 6900} 6901 6902bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6903 switch (msg->what()) { 6904 case kWhatSetParameters: 6905 case kWhatShutdown: 6906 { 6907 
mCodec->deferMessage(msg); 6908 return true; 6909 } 6910 6911 case kWhatResume: 6912 { 6913 // We'll be active soon enough. 6914 return true; 6915 } 6916 6917 case kWhatFlush: 6918 { 6919 // We haven't even started yet, so we're flushed alright... 6920 sp<AMessage> notify = mCodec->mNotify->dup(); 6921 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6922 notify->post(); 6923 6924 return true; 6925 } 6926 6927 case kWhatSignalEndOfInputStream: 6928 { 6929 mCodec->onSignalEndOfInputStream(); 6930 return true; 6931 } 6932 6933 default: 6934 return BaseState::onMessageReceived(msg); 6935 } 6936} 6937 6938bool ACodec::IdleToExecutingState::onOMXEvent( 6939 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6940 switch (event) { 6941 case OMX_EventCmdComplete: 6942 { 6943 if (data1 != (OMX_U32)OMX_CommandStateSet 6944 || data2 != (OMX_U32)OMX_StateExecuting) { 6945 ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)", 6946 asString((OMX_COMMANDTYPE)data1), data1, 6947 asString((OMX_STATETYPE)data2), data2); 6948 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6949 return true; 6950 } 6951 6952 mCodec->mExecutingState->resume(); 6953 mCodec->changeState(mCodec->mExecutingState); 6954 6955 return true; 6956 } 6957 6958 default: 6959 return BaseState::onOMXEvent(event, data1, data2); 6960 } 6961} 6962 6963//////////////////////////////////////////////////////////////////////////////// 6964 6965ACodec::ExecutingState::ExecutingState(ACodec *codec) 6966 : BaseState(codec), 6967 mActive(false) { 6968} 6969 6970ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( 6971 OMX_U32 /* portIndex */) { 6972 return RESUBMIT_BUFFERS; 6973} 6974 6975void ACodec::ExecutingState::submitOutputMetaBuffers() { 6976 // submit as many buffers as there are input buffers with the codec 6977 // in case we are in port reconfiguring 6978 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 6979 BufferInfo *info = 
&mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6980 6981 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 6982 if (mCodec->submitOutputMetadataBuffer() != OK) 6983 break; 6984 } 6985 } 6986 6987 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 6988 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 6989} 6990 6991void ACodec::ExecutingState::submitRegularOutputBuffers() { 6992 bool failed = false; 6993 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 6994 BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i); 6995 6996 if (mCodec->mNativeWindow != NULL) { 6997 if (info->mStatus != BufferInfo::OWNED_BY_US 6998 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6999 ALOGE("buffers should be owned by us or the surface"); 7000 failed = true; 7001 break; 7002 } 7003 7004 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 7005 continue; 7006 } 7007 } else { 7008 if (info->mStatus != BufferInfo::OWNED_BY_US) { 7009 ALOGE("buffers should be owned by us"); 7010 failed = true; 7011 break; 7012 } 7013 } 7014 7015 ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID); 7016 7017 info->checkWriteFence("submitRegularOutputBuffers"); 7018 status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 7019 info->mFenceFd = -1; 7020 if (err != OK) { 7021 failed = true; 7022 break; 7023 } 7024 7025 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 7026 } 7027 7028 if (failed) { 7029 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7030 } 7031} 7032 7033void ACodec::ExecutingState::submitOutputBuffers() { 7034 submitRegularOutputBuffers(); 7035 if (mCodec->storingMetadataInDecodedBuffers()) { 7036 submitOutputMetaBuffers(); 7037 } 7038} 7039 7040void ACodec::ExecutingState::resume() { 7041 if (mActive) { 7042 ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str()); 7043 return; 7044 } 7045 7046 
submitOutputBuffers(); 7047 7048 // Post all available input buffers 7049 if (mCodec->mBuffers[kPortIndexInput].size() == 0u) { 7050 ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str()); 7051 } 7052 7053 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) { 7054 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 7055 if (info->mStatus == BufferInfo::OWNED_BY_US) { 7056 postFillThisBuffer(info); 7057 } 7058 } 7059 7060 mActive = true; 7061} 7062 7063void ACodec::ExecutingState::stateEntered() { 7064 ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); 7065 7066 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7067 mCodec->processDeferredMessages(); 7068} 7069 7070bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { 7071 bool handled = false; 7072 7073 switch (msg->what()) { 7074 case kWhatShutdown: 7075 { 7076 int32_t keepComponentAllocated; 7077 CHECK(msg->findInt32( 7078 "keepComponentAllocated", &keepComponentAllocated)); 7079 7080 mCodec->mShutdownInProgress = true; 7081 mCodec->mExplicitShutdown = true; 7082 mCodec->mKeepComponentAllocated = keepComponentAllocated; 7083 7084 mActive = false; 7085 7086 status_t err = mCodec->mOMX->sendCommand( 7087 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 7088 if (err != OK) { 7089 if (keepComponentAllocated) { 7090 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7091 } 7092 // TODO: do some recovery here. 
// Steady-state message dispatch: shutdown, flush, resume, IDR requests,
// runtime parameter changes, EOS signalling, and a temporary metadata-EOS
// workaround. Unhandled messages go to BaseState.
bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            // Begin the Executing->Idle leg of the teardown.
            status_t err = mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            // Flush both ports; completion is handled in FlushingState.
            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            // Best effort; a failed IDR request is only logged.
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply with the result only if the caller asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            // Keep feeding output metadata buffers until output EOS appears.
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime parameter keys to the component. Recognized keys are
// applied independently; the first hard failure aborts and its error is
// returned. "intra-refresh-period" and temporal-layering failures are
// deliberately ignored (those keys are optional).
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMX->setConfig(
                mNode,
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t timeOffsetUs;
    if (params->findInt64("time-offset-us", &timeOffsetUs)) {
        status_t err = mOMX->setInternalOption(
            mNode,
            kPortIndexInput,
            IOMX::INTERNAL_OPTION_TIME_OFFSET,
            &timeOffsetUs,
            sizeof(timeOffsetUs));

        if (err != OK) {
            ALOGE("[%s] Unable to set input buffer time offset (err %d)",
                mComponentName.c_str(),
                err);
            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        status_t err =
            mOMX->setInternalOption(
                    mNode,
                    kPortIndexInput,
                    IOMX::INTERNAL_OPTION_START_TIME,
                    &skipFramesBeforeUs,
                    sizeof(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        // Any non-zero value means "suspend input".
        bool suspend = dropInputFrames != 0;

        status_t err =
            mOMX->setInternalOption(
                    mNode,
                    kPortIndexInput,
                    IOMX::INTERNAL_OPTION_SUSPEND,
                    &suspend,
                    sizeof(suspend));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        // Value is ignored; the mere presence of the key requests a sync frame.
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err = setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    status_t err = configureTemporalLayers(params, false /* inConfigure */, mOutputFormat);
    if (err != OK) {
        err = OK; // ignore failure
    }

    return err;
}

// Asks the component to mark end-of-stream on the input port and notifies
// the client; a failure is reported via the "err" field of the notification.
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = mOMX->signalEndOfInputStream(mNode);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Handles component-originated events while executing. A port-definition
// change on the output port triggers the full port-reconfiguration dance.
bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            // data2 == 0 (unspecified) or an explicit port-definition change
            // both require disabling and re-enabling the output port.
            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                mCodec->mMetadataBuffersToSubmit = 0;
                // Port disable must succeed; completion is handled in
                // OutputPortSettingsChangedState::onOMXEvent.
                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode,
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                // Crop/intra-refresh changes are handled elsewhere; anything
                // else is merely logged.
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                     mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// While the output port is being reconfigured, output buffers are freed as
// they come back while input buffers continue to be resubmitted.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

// Defers state-changing client requests until the port reconfiguration has
// finished; everything else falls through to the base state.
bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

bool
ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Drives the output-port reconfiguration: on PortDisable completion the old
// buffers are released and the port re-enabled with fresh allocations; on
// PortEnable completion we return to Executing.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    // All output buffers should have been freed before the
                    // disable completed; anything left is a protocol error.
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from executing->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                // Only resubmit output buffers if the executing state was
                // actually running (not paused) when reconfiguration began.
                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

// While transitioning Executing -> Idle, flushes are ignored and repeated
// shutdown requests are no-ops (the transition is already in flight).
bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

// Waits for the component to confirm the Idle transition; port/EOS events
// arriving during shutdown are deliberately dropped.
bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Proceeds to Idle -> Loaded once the component reported Idle AND every
// buffer has been returned to us; frees all port buffers along the way.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            // Free the output port even if the input free failed; report
            // the first error encountered.
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            if (err == OK) {
                err = err2;
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display.
            // This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

// Each returned buffer is a chance that we now own everything; re-check
// after the base state has accounted for it.
void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

// While transitioning Idle -> Loaded, repeated shutdowns are no-ops and
// flush requests are an error (shutdown was already requested).
bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGE("Got flush request in IdleToLoadedState");
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::IdleToLoadedState::stateEntered() {
    ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
}

// Completes the teardown once the component confirms the Loaded state.
bool ACodec::IdleToLoadedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateLoaded) {
                ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->changeState(mCodec->mLoadedState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::FlushingState::FlushingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::FlushingState::stateEntered() {
    ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());

    // Both ports must report flush completion before we leave this state.
    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
}

// Shutdown requests are deferred until the flush finishes; a redundant
// flush request is absorbed.
bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            break;
        }

        case kWhatFlush:
        {
            // We're already doing this right now.
7687 handled = true; 7688 break; 7689 } 7690 7691 default: 7692 handled = BaseState::onMessageReceived(msg); 7693 break; 7694 } 7695 7696 return handled; 7697} 7698 7699bool ACodec::FlushingState::onOMXEvent( 7700 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7701 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7702 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7703 7704 switch (event) { 7705 case OMX_EventCmdComplete: 7706 { 7707 if (data1 != (OMX_U32)OMX_CommandFlush) { 7708 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7709 asString((OMX_COMMANDTYPE)data1), data1, data2); 7710 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7711 return true; 7712 } 7713 7714 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7715 if (mFlushComplete[data2]) { 7716 ALOGW("Flush already completed for %s port", 7717 data2 == kPortIndexInput ? "input" : "output"); 7718 return true; 7719 } 7720 mFlushComplete[data2] = true; 7721 7722 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7723 changeStateIfWeOwnAllBuffers(); 7724 } 7725 } else if (data2 == OMX_ALL) { 7726 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7727 ALOGW("received flush complete event for OMX_ALL before ports have been" 7728 "flushed (%d/%d)", 7729 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7730 return false; 7731 } 7732 7733 changeStateIfWeOwnAllBuffers(); 7734 } else { 7735 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7736 } 7737 7738 return true; 7739 } 7740 7741 case OMX_EventPortSettingsChanged: 7742 { 7743 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7744 msg->setInt32("type", omx_message::EVENT); 7745 msg->setInt32("node", mCodec->mNode); 7746 msg->setInt32("event", event); 7747 msg->setInt32("data1", data1); 7748 msg->setInt32("data2", data2); 7749 7750 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7751 
mCodec->mComponentName.c_str()); 7752 7753 mCodec->deferMessage(msg); 7754 7755 return true; 7756 } 7757 7758 default: 7759 return BaseState::onOMXEvent(event, data1, data2); 7760 } 7761 7762 return true; 7763} 7764 7765void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7766 BaseState::onOutputBufferDrained(msg); 7767 7768 changeStateIfWeOwnAllBuffers(); 7769} 7770 7771void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7772 BaseState::onInputBufferFilled(msg); 7773 7774 changeStateIfWeOwnAllBuffers(); 7775} 7776 7777void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7778 if (mFlushComplete[kPortIndexInput] 7779 && mFlushComplete[kPortIndexOutput] 7780 && mCodec->allYourBuffersAreBelongToUs()) { 7781 // We now own all buffers except possibly those still queued with 7782 // the native window for rendering. Let's get those back as well. 7783 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7784 7785 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7786 7787 sp<AMessage> notify = mCodec->mNotify->dup(); 7788 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7789 notify->post(); 7790 7791 mCodec->mPortEOS[kPortIndexInput] = 7792 mCodec->mPortEOS[kPortIndexOutput] = false; 7793 7794 mCodec->mInputEOSResult = OK; 7795 7796 if (mCodec->mSkipCutBuffer != NULL) { 7797 mCodec->mSkipCutBuffer->clear(); 7798 } 7799 7800 mCodec->changeState(mCodec->mExecutingState); 7801 } 7802} 7803 7804status_t ACodec::queryCapabilities( 7805 const AString &name, const AString &mime, bool isEncoder, 7806 sp<MediaCodecInfo::Capabilities> *caps) { 7807 (*caps).clear(); 7808 const char *role = GetComponentRole(isEncoder, mime.c_str()); 7809 if (role == NULL) { 7810 return BAD_VALUE; 7811 } 7812 7813 OMXClient client; 7814 status_t err = client.connect(); 7815 if (err != OK) { 7816 return err; 7817 } 7818 7819 sp<IOMX> omx = client.interface(); 7820 sp<CodecObserver> observer = new CodecObserver; 7821 
    IOMX::node_id node = 0;

    err = omx->allocateNode(name.c_str(), observer, NULL, &node);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = SetComponentRole(omx, node, role);
    if (err != OK) {
        omx->freeNode(node);
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        // Enumerate supported profile/level pairs until the component stops
        // answering or we hit the kMaxIndicesToCheck safety cap.
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            OMX_U32 flexibleEquivalent;
            if (IsFlexibleColorFormat(
                    omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                // Emit the flexible alias once, before the first concrete
                // format that maps to it.
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        if (param.nProfileIndex == 0) {
            // nProfileIndex still holds the last index we set; 0 here means
            // even the very first query failed.
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        native_handle_t *sidebandHandle = NULL;
        if (omx->configureVideoTunnelMode(
                node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omx->storeMetaDataInBuffers(
                node, kPortIndexOutput, OMX_TRUE) == OK ||
            omx->prepareForAdaptivePlayback(
                node, kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
        if (omx->getConfig(
                node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omx->freeNode(node);
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
7968//static 7969status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7970 switch (numChannels) { 7971 case 1: 7972 map[0] = OMX_AUDIO_ChannelCF; 7973 break; 7974 case 2: 7975 map[0] = OMX_AUDIO_ChannelLF; 7976 map[1] = OMX_AUDIO_ChannelRF; 7977 break; 7978 case 3: 7979 map[0] = OMX_AUDIO_ChannelLF; 7980 map[1] = OMX_AUDIO_ChannelRF; 7981 map[2] = OMX_AUDIO_ChannelCF; 7982 break; 7983 case 4: 7984 map[0] = OMX_AUDIO_ChannelLF; 7985 map[1] = OMX_AUDIO_ChannelRF; 7986 map[2] = OMX_AUDIO_ChannelLR; 7987 map[3] = OMX_AUDIO_ChannelRR; 7988 break; 7989 case 5: 7990 map[0] = OMX_AUDIO_ChannelLF; 7991 map[1] = OMX_AUDIO_ChannelRF; 7992 map[2] = OMX_AUDIO_ChannelCF; 7993 map[3] = OMX_AUDIO_ChannelLR; 7994 map[4] = OMX_AUDIO_ChannelRR; 7995 break; 7996 case 6: 7997 map[0] = OMX_AUDIO_ChannelLF; 7998 map[1] = OMX_AUDIO_ChannelRF; 7999 map[2] = OMX_AUDIO_ChannelCF; 8000 map[3] = OMX_AUDIO_ChannelLFE; 8001 map[4] = OMX_AUDIO_ChannelLR; 8002 map[5] = OMX_AUDIO_ChannelRR; 8003 break; 8004 case 7: 8005 map[0] = OMX_AUDIO_ChannelLF; 8006 map[1] = OMX_AUDIO_ChannelRF; 8007 map[2] = OMX_AUDIO_ChannelCF; 8008 map[3] = OMX_AUDIO_ChannelLFE; 8009 map[4] = OMX_AUDIO_ChannelLR; 8010 map[5] = OMX_AUDIO_ChannelRR; 8011 map[6] = OMX_AUDIO_ChannelCS; 8012 break; 8013 case 8: 8014 map[0] = OMX_AUDIO_ChannelLF; 8015 map[1] = OMX_AUDIO_ChannelRF; 8016 map[2] = OMX_AUDIO_ChannelCF; 8017 map[3] = OMX_AUDIO_ChannelLFE; 8018 map[4] = OMX_AUDIO_ChannelLR; 8019 map[5] = OMX_AUDIO_ChannelRR; 8020 map[6] = OMX_AUDIO_ChannelLS; 8021 map[7] = OMX_AUDIO_ChannelRS; 8022 break; 8023 default: 8024 return -EINVAL; 8025 } 8026 8027 return OK; 8028} 8029 8030} // namespace android 8031