// ACodec.cpp revision a057d0393fd5ed593f163c8235b070e9ad188594
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "omx/OMXUtils.h" 57 58namespace android { 59 60enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62}; 63 64// OMX errors are directly mapped into 
status_t range if 65// there is no corresponding MediaError status code. 66// Use the statusFromOMXError(int32_t omxError) function. 67// 68// Currently this is a direct map. 69// See frameworks/native/include/media/openmax/OMX_Core.h 70// 71// Vendor OMX errors from 0x90000000 - 0x9000FFFF 72// Extension OMX errors from 0x8F000000 - 0x90000000 73// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 74// 75 76// returns true if err is a recognized OMX error code. 77// as OMX error is OMX_S32, this is an int32_t type 78static inline bool isOMXError(int32_t err) { 79 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 80} 81 82// converts an OMX error to a status_t 83static inline status_t statusFromOMXError(int32_t omxError) { 84 switch (omxError) { 85 case OMX_ErrorInvalidComponentName: 86 case OMX_ErrorComponentNotFound: 87 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 88 default: 89 return isOMXError(omxError) ? omxError : 0; // no translation required 90 } 91} 92 93// checks and converts status_t to a non-side-effect status_t 94static inline status_t makeNoSideEffectStatus(status_t err) { 95 switch (err) { 96 // the following errors have side effects and may come 97 // from other code modules. Remap for safety reasons. 
98 case INVALID_OPERATION: 99 case DEAD_OBJECT: 100 return UNKNOWN_ERROR; 101 default: 102 return err; 103 } 104} 105 106struct MessageList : public RefBase { 107 MessageList() { 108 } 109 virtual ~MessageList() { 110 } 111 std::list<sp<AMessage> > &getList() { return mList; } 112private: 113 std::list<sp<AMessage> > mList; 114 115 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 116}; 117 118static sp<DataConverter> getCopyConverter() { 119 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited 120 static sp<DataConverter> sCopyConverter; // zero-inited 121 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); }); 122 return sCopyConverter; 123} 124 125struct CodecObserver : public BnOMXObserver { 126 CodecObserver() {} 127 128 void setNotificationMessage(const sp<AMessage> &msg) { 129 mNotify = msg; 130 } 131 132 // from IOMXObserver 133 virtual void onMessages(const std::list<omx_message> &messages) { 134 if (messages.empty()) { 135 return; 136 } 137 138 sp<AMessage> notify = mNotify->dup(); 139 bool first = true; 140 sp<MessageList> msgList = new MessageList(); 141 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 142 it != messages.cend(); ++it) { 143 const omx_message &omx_msg = *it; 144 if (first) { 145 notify->setInt32("node", omx_msg.node); 146 first = false; 147 } 148 149 sp<AMessage> msg = new AMessage; 150 msg->setInt32("type", omx_msg.type); 151 switch (omx_msg.type) { 152 case omx_message::EVENT: 153 { 154 msg->setInt32("event", omx_msg.u.event_data.event); 155 msg->setInt32("data1", omx_msg.u.event_data.data1); 156 msg->setInt32("data2", omx_msg.u.event_data.data2); 157 break; 158 } 159 160 case omx_message::EMPTY_BUFFER_DONE: 161 { 162 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 163 msg->setInt32("fence_fd", omx_msg.fenceFd); 164 break; 165 } 166 167 case omx_message::FILL_BUFFER_DONE: 168 { 169 msg->setInt32( 170 "buffer", omx_msg.u.extended_buffer_data.buffer); 171 msg->setInt32( 172 "range_offset", 173 
omx_msg.u.extended_buffer_data.range_offset); 174 msg->setInt32( 175 "range_length", 176 omx_msg.u.extended_buffer_data.range_length); 177 msg->setInt32( 178 "flags", 179 omx_msg.u.extended_buffer_data.flags); 180 msg->setInt64( 181 "timestamp", 182 omx_msg.u.extended_buffer_data.timestamp); 183 msg->setInt32( 184 "fence_fd", omx_msg.fenceFd); 185 break; 186 } 187 188 case omx_message::FRAME_RENDERED: 189 { 190 msg->setInt64( 191 "media_time_us", omx_msg.u.render_data.timestamp); 192 msg->setInt64( 193 "system_nano", omx_msg.u.render_data.nanoTime); 194 break; 195 } 196 197 default: 198 ALOGE("Unrecognized message type: %d", omx_msg.type); 199 break; 200 } 201 msgList->getList().push_back(msg); 202 } 203 notify->setObject("messages", msgList); 204 notify->post(); 205 } 206 207protected: 208 virtual ~CodecObserver() {} 209 210private: 211 sp<AMessage> mNotify; 212 213 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); 214}; 215 216//////////////////////////////////////////////////////////////////////////////// 217 218struct ACodec::BaseState : public AState { 219 explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL); 220 221protected: 222 enum PortMode { 223 KEEP_BUFFERS, 224 RESUBMIT_BUFFERS, 225 FREE_BUFFERS, 226 }; 227 228 ACodec *mCodec; 229 230 virtual PortMode getPortMode(OMX_U32 portIndex); 231 232 virtual bool onMessageReceived(const sp<AMessage> &msg); 233 234 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 235 236 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 237 virtual void onInputBufferFilled(const sp<AMessage> &msg); 238 239 void postFillThisBuffer(BufferInfo *info); 240 241private: 242 // Handles an OMX message. Returns true iff message was handled. 243 bool onOMXMessage(const sp<AMessage> &msg); 244 245 // Handles a list of messages. Returns true iff messages were handled. 
246 bool onOMXMessageList(const sp<AMessage> &msg); 247 248 // returns true iff this message is for this component and the component is alive 249 bool checkOMXMessage(const sp<AMessage> &msg); 250 251 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd); 252 253 bool onOMXFillBufferDone( 254 IOMX::buffer_id bufferID, 255 size_t rangeOffset, size_t rangeLength, 256 OMX_U32 flags, 257 int64_t timeUs, 258 int fenceFd); 259 260 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 261 262 void getMoreInputDataIfPossible(); 263 264 DISALLOW_EVIL_CONSTRUCTORS(BaseState); 265}; 266 267//////////////////////////////////////////////////////////////////////////////// 268 269struct ACodec::DeathNotifier : public IBinder::DeathRecipient { 270 explicit DeathNotifier(const sp<AMessage> ¬ify) 271 : mNotify(notify) { 272 } 273 274 virtual void binderDied(const wp<IBinder> &) { 275 mNotify->post(); 276 } 277 278protected: 279 virtual ~DeathNotifier() {} 280 281private: 282 sp<AMessage> mNotify; 283 284 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); 285}; 286 287struct ACodec::UninitializedState : public ACodec::BaseState { 288 explicit UninitializedState(ACodec *codec); 289 290protected: 291 virtual bool onMessageReceived(const sp<AMessage> &msg); 292 virtual void stateEntered(); 293 294private: 295 void onSetup(const sp<AMessage> &msg); 296 bool onAllocateComponent(const sp<AMessage> &msg); 297 298 sp<DeathNotifier> mDeathNotifier; 299 300 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); 301}; 302 303//////////////////////////////////////////////////////////////////////////////// 304 305struct ACodec::LoadedState : public ACodec::BaseState { 306 explicit LoadedState(ACodec *codec); 307 308protected: 309 virtual bool onMessageReceived(const sp<AMessage> &msg); 310 virtual void stateEntered(); 311 312private: 313 friend struct ACodec::UninitializedState; 314 315 bool onConfigureComponent(const sp<AMessage> &msg); 316 void onCreateInputSurface(const 
sp<AMessage> &msg); 317 void onSetInputSurface(const sp<AMessage> &msg); 318 void onStart(); 319 void onShutdown(bool keepComponentAllocated); 320 321 status_t setupInputSurface(); 322 323 DISALLOW_EVIL_CONSTRUCTORS(LoadedState); 324}; 325 326//////////////////////////////////////////////////////////////////////////////// 327 328struct ACodec::LoadedToIdleState : public ACodec::BaseState { 329 explicit LoadedToIdleState(ACodec *codec); 330 331protected: 332 virtual bool onMessageReceived(const sp<AMessage> &msg); 333 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 334 virtual void stateEntered(); 335 336private: 337 status_t allocateBuffers(); 338 339 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); 340}; 341 342//////////////////////////////////////////////////////////////////////////////// 343 344struct ACodec::IdleToExecutingState : public ACodec::BaseState { 345 explicit IdleToExecutingState(ACodec *codec); 346 347protected: 348 virtual bool onMessageReceived(const sp<AMessage> &msg); 349 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 350 virtual void stateEntered(); 351 352private: 353 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); 354}; 355 356//////////////////////////////////////////////////////////////////////////////// 357 358struct ACodec::ExecutingState : public ACodec::BaseState { 359 explicit ExecutingState(ACodec *codec); 360 361 void submitRegularOutputBuffers(); 362 void submitOutputMetaBuffers(); 363 void submitOutputBuffers(); 364 365 // Submit output buffers to the decoder, submit input buffers to client 366 // to fill with data. 367 void resume(); 368 369 // Returns true iff input and output buffers are in play. 
370 bool active() const { return mActive; } 371 372protected: 373 virtual PortMode getPortMode(OMX_U32 portIndex); 374 virtual bool onMessageReceived(const sp<AMessage> &msg); 375 virtual void stateEntered(); 376 377 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 378 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 379 380private: 381 bool mActive; 382 383 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); 384}; 385 386//////////////////////////////////////////////////////////////////////////////// 387 388struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { 389 explicit OutputPortSettingsChangedState(ACodec *codec); 390 391protected: 392 virtual PortMode getPortMode(OMX_U32 portIndex); 393 virtual bool onMessageReceived(const sp<AMessage> &msg); 394 virtual void stateEntered(); 395 396 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 397 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 398 399private: 400 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); 401}; 402 403//////////////////////////////////////////////////////////////////////////////// 404 405struct ACodec::ExecutingToIdleState : public ACodec::BaseState { 406 explicit ExecutingToIdleState(ACodec *codec); 407 408protected: 409 virtual bool onMessageReceived(const sp<AMessage> &msg); 410 virtual void stateEntered(); 411 412 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 413 414 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 415 virtual void onInputBufferFilled(const sp<AMessage> &msg); 416 417private: 418 void changeStateIfWeOwnAllBuffers(); 419 420 bool mComponentNowIdle; 421 422 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); 423}; 424 425//////////////////////////////////////////////////////////////////////////////// 426 427struct ACodec::IdleToLoadedState : public ACodec::BaseState { 428 explicit IdleToLoadedState(ACodec 
*codec); 429 430protected: 431 virtual bool onMessageReceived(const sp<AMessage> &msg); 432 virtual void stateEntered(); 433 434 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 435 436private: 437 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); 438}; 439 440//////////////////////////////////////////////////////////////////////////////// 441 442struct ACodec::FlushingState : public ACodec::BaseState { 443 explicit FlushingState(ACodec *codec); 444 445protected: 446 virtual bool onMessageReceived(const sp<AMessage> &msg); 447 virtual void stateEntered(); 448 449 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 450 451 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 452 virtual void onInputBufferFilled(const sp<AMessage> &msg); 453 454private: 455 bool mFlushComplete[2]; 456 457 void changeStateIfWeOwnAllBuffers(); 458 459 DISALLOW_EVIL_CONSTRUCTORS(FlushingState); 460}; 461 462//////////////////////////////////////////////////////////////////////////////// 463 464void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) { 465 if (mFenceFd >= 0) { 466 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", 467 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 468 } 469 mFenceFd = fenceFd; 470 mIsReadFence = false; 471} 472 473void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { 474 if (mFenceFd >= 0) { 475 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", 476 mIsReadFence ? 
"read" : "write", mFenceFd, fenceFd, dbg); 477 } 478 mFenceFd = fenceFd; 479 mIsReadFence = true; 480} 481 482void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 483 if (mFenceFd >= 0 && mIsReadFence) { 484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 485 } 486} 487 488void ACodec::BufferInfo::checkReadFence(const char *dbg) { 489 if (mFenceFd >= 0 && !mIsReadFence) { 490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 491 } 492} 493 494//////////////////////////////////////////////////////////////////////////////// 495 496ACodec::ACodec() 497 : mSampleRate(0), 498 mQuirks(0), 499 mNode(0), 500 mUsingNativeWindow(false), 501 mNativeWindowUsageBits(0), 502 mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN), 503 mIsVideo(false), 504 mIsEncoder(false), 505 mFatalError(false), 506 mShutdownInProgress(false), 507 mExplicitShutdown(false), 508 mIsLegacyVP9Decoder(false), 509 mEncoderDelay(0), 510 mEncoderPadding(0), 511 mRotationDegrees(0), 512 mChannelMaskPresent(false), 513 mChannelMask(0), 514 mDequeueCounter(0), 515 mInputMetadataType(kMetadataBufferTypeInvalid), 516 mOutputMetadataType(kMetadataBufferTypeInvalid), 517 mLegacyAdaptiveExperiment(false), 518 mMetadataBuffersToSubmit(0), 519 mNumUndequeuedBuffers(0), 520 mRepeatFrameDelayUs(-1ll), 521 mMaxPtsGapUs(-1ll), 522 mMaxFps(-1), 523 mTimePerFrameUs(-1ll), 524 mTimePerCaptureUs(-1ll), 525 mCreateInputBuffersSuspended(false), 526 mTunneled(false), 527 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0), 528 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) { 529 mUninitializedState = new UninitializedState(this); 530 mLoadedState = new LoadedState(this); 531 mLoadedToIdleState = new LoadedToIdleState(this); 532 mIdleToExecutingState = new IdleToExecutingState(this); 533 mExecutingState = new ExecutingState(this); 534 535 mOutputPortSettingsChangedState = 536 new OutputPortSettingsChangedState(this); 537 538 mExecutingToIdleState = new ExecutingToIdleState(this); 539 
mIdleToLoadedState = new IdleToLoadedState(this); 540 mFlushingState = new FlushingState(this); 541 542 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 543 mInputEOSResult = OK; 544 545 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 546 547 changeState(mUninitializedState); 548} 549 550ACodec::~ACodec() { 551} 552 553void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 554 mNotify = msg; 555} 556 557void ACodec::initiateSetup(const sp<AMessage> &msg) { 558 msg->setWhat(kWhatSetup); 559 msg->setTarget(this); 560 msg->post(); 561} 562 563void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 564 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 565 msg->setMessage("params", params); 566 msg->post(); 567} 568 569void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 570 msg->setWhat(kWhatAllocateComponent); 571 msg->setTarget(this); 572 msg->post(); 573} 574 575void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 576 msg->setWhat(kWhatConfigureComponent); 577 msg->setTarget(this); 578 msg->post(); 579} 580 581status_t ACodec::setSurface(const sp<Surface> &surface) { 582 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 583 msg->setObject("surface", surface); 584 585 sp<AMessage> response; 586 status_t err = msg->postAndAwaitResponse(&response); 587 588 if (err == OK) { 589 (void)response->findInt32("err", &err); 590 } 591 return err; 592} 593 594void ACodec::initiateCreateInputSurface() { 595 (new AMessage(kWhatCreateInputSurface, this))->post(); 596} 597 598void ACodec::initiateSetInputSurface( 599 const sp<PersistentSurface> &surface) { 600 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 601 msg->setObject("input-surface", surface); 602 msg->post(); 603} 604 605void ACodec::signalEndOfInputStream() { 606 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 607} 608 609void ACodec::initiateStart() { 610 (new AMessage(kWhatStart, this))->post(); 611} 612 613void 
ACodec::signalFlush() { 614 ALOGV("[%s] signalFlush", mComponentName.c_str()); 615 (new AMessage(kWhatFlush, this))->post(); 616} 617 618void ACodec::signalResume() { 619 (new AMessage(kWhatResume, this))->post(); 620} 621 622void ACodec::initiateShutdown(bool keepComponentAllocated) { 623 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 624 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 625 msg->post(); 626 if (!keepComponentAllocated) { 627 // ensure shutdown completes in 3 seconds 628 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 629 } 630} 631 632void ACodec::signalRequestIDRFrame() { 633 (new AMessage(kWhatRequestIDRFrame, this))->post(); 634} 635 636// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 637// Some codecs may return input buffers before having them processed. 638// This causes a halt if we already signaled an EOS on the input 639// port. For now keep submitting an output buffer if there was an 640// EOS on the input port, but not yet on the output port. 
641void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 642 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 643 mMetadataBuffersToSubmit > 0) { 644 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 645 } 646} 647 648status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 649 // allow keeping unset surface 650 if (surface == NULL) { 651 if (mNativeWindow != NULL) { 652 ALOGW("cannot unset a surface"); 653 return INVALID_OPERATION; 654 } 655 return OK; 656 } 657 658 // cannot switch from bytebuffers to surface 659 if (mNativeWindow == NULL) { 660 ALOGW("component was not configured with a surface"); 661 return INVALID_OPERATION; 662 } 663 664 ANativeWindow *nativeWindow = surface.get(); 665 // if we have not yet started the codec, we can simply set the native window 666 if (mBuffers[kPortIndexInput].size() == 0) { 667 mNativeWindow = surface; 668 return OK; 669 } 670 671 // we do not support changing a tunneled surface after start 672 if (mTunneled) { 673 ALOGW("cannot change tunneled surface"); 674 return INVALID_OPERATION; 675 } 676 677 int usageBits = 0; 678 // no need to reconnect as we will not dequeue all buffers 679 status_t err = setupNativeWindowSizeFormatAndUsage( 680 nativeWindow, &usageBits, 681 !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */); 682 if (err != OK) { 683 return err; 684 } 685 686 int ignoredFlags = kVideoGrallocUsage; 687 // New output surface is not allowed to add new usage flag except ignored ones. 688 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 689 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 690 return BAD_VALUE; 691 } 692 693 // get min undequeued count. We cannot switch to a surface that has a higher 694 // undequeued count than we allocated. 
695 int minUndequeuedBuffers = 0; 696 err = nativeWindow->query( 697 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 698 &minUndequeuedBuffers); 699 if (err != 0) { 700 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 701 strerror(-err), -err); 702 return err; 703 } 704 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 705 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 706 minUndequeuedBuffers, mNumUndequeuedBuffers); 707 return BAD_VALUE; 708 } 709 710 // we cannot change the number of output buffers while OMX is running 711 // set up surface to the same count 712 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 713 ALOGV("setting up surface for %zu buffers", buffers.size()); 714 715 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 716 if (err != 0) { 717 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 718 -err); 719 return err; 720 } 721 722 // need to enable allocation when attaching 723 surface->getIGraphicBufferProducer()->allowAllocation(true); 724 725 // for meta data mode, we move dequeud buffers to the new surface. 
726 // for non-meta mode, we must move all registered buffers 727 for (size_t i = 0; i < buffers.size(); ++i) { 728 const BufferInfo &info = buffers[i]; 729 // skip undequeued buffers for meta data mode 730 if (storingMetadataInDecodedBuffers() 731 && !mLegacyAdaptiveExperiment 732 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 733 ALOGV("skipping buffer"); 734 continue; 735 } 736 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 737 738 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 739 if (err != OK) { 740 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 741 info.mGraphicBuffer->getNativeBuffer(), 742 strerror(-err), -err); 743 return err; 744 } 745 } 746 747 // cancel undequeued buffers to new surface 748 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 749 for (size_t i = 0; i < buffers.size(); ++i) { 750 BufferInfo &info = buffers.editItemAt(i); 751 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 752 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 753 err = nativeWindow->cancelBuffer( 754 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 755 info.mFenceFd = -1; 756 if (err != OK) { 757 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 758 info.mGraphicBuffer->getNativeBuffer(), 759 strerror(-err), -err); 760 return err; 761 } 762 } 763 } 764 // disallow further allocation 765 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 766 } 767 768 // push blank buffers to previous window if requested 769 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 770 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 771 } 772 773 mNativeWindow = nativeWindow; 774 mNativeWindowUsageBits = usageBits; 775 return OK; 776} 777 778status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 779 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 780 781 CHECK(mDealer[portIndex] == 
NULL); 782 CHECK(mBuffers[portIndex].isEmpty()); 783 784 status_t err; 785 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { 786 if (storingMetadataInDecodedBuffers()) { 787 err = allocateOutputMetadataBuffers(); 788 } else { 789 err = allocateOutputBuffersFromNativeWindow(); 790 } 791 } else { 792 OMX_PARAM_PORTDEFINITIONTYPE def; 793 InitOMXParams(&def); 794 def.nPortIndex = portIndex; 795 796 err = mOMX->getParameter( 797 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 798 799 if (err == OK) { 800 MetadataBufferType type = 801 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 802 size_t bufSize = def.nBufferSize; 803 if (type == kMetadataBufferTypeANWBuffer) { 804 bufSize = sizeof(VideoNativeMetadata); 805 } else if (type == kMetadataBufferTypeNativeHandleSource) { 806 bufSize = sizeof(VideoNativeHandleMetadata); 807 } 808 809 // If using gralloc or native source input metadata buffers, allocate largest 810 // metadata size as we prefer to generate native source metadata, but component 811 // may require gralloc source. For camera source, allocate at least enough 812 // size for native metadata buffers. 
813 size_t allottedSize = bufSize; 814 if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) { 815 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 816 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 817 bufSize = max(bufSize, sizeof(VideoNativeMetadata)); 818 } 819 820 size_t conversionBufferSize = 0; 821 822 sp<DataConverter> converter = mConverter[portIndex]; 823 if (converter != NULL) { 824 // here we assume sane conversions of max 4:1, so result fits in int32 825 if (portIndex == kPortIndexInput) { 826 conversionBufferSize = converter->sourceSize(bufSize); 827 } else { 828 conversionBufferSize = converter->targetSize(bufSize); 829 } 830 } 831 832 size_t alignment = MemoryDealer::getAllocationAlignment(); 833 834 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port", 835 mComponentName.c_str(), 836 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 837 portIndex == kPortIndexInput ? 
"input" : "output"); 838 839 // verify buffer sizes to avoid overflow in align() 840 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { 841 ALOGE("b/22885421"); 842 return NO_MEMORY; 843 } 844 845 // don't modify bufSize as OMX may not expect it to increase after negotiation 846 size_t alignedSize = align(bufSize, alignment); 847 size_t alignedConvSize = align(conversionBufferSize, alignment); 848 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { 849 ALOGE("b/22885421"); 850 return NO_MEMORY; 851 } 852 853 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); 854 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 855 856 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 857 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 858 if (mem == NULL || mem->pointer() == NULL) { 859 return NO_MEMORY; 860 } 861 862 BufferInfo info; 863 info.mStatus = BufferInfo::OWNED_BY_US; 864 info.mFenceFd = -1; 865 info.mRenderInfo = NULL; 866 info.mNativeHandle = NULL; 867 868 uint32_t requiresAllocateBufferBit = 869 (portIndex == kPortIndexInput) 870 ? kRequiresAllocateBufferOnInputPorts 871 : kRequiresAllocateBufferOnOutputPorts; 872 873 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 874 mem.clear(); 875 876 void *ptr = NULL; 877 sp<NativeHandle> native_handle; 878 err = mOMX->allocateSecureBuffer( 879 mNode, portIndex, bufSize, &info.mBufferID, 880 &ptr, &native_handle); 881 882 // TRICKY: this representation is unorthodox, but ACodec requires 883 // an ABuffer with a proper size to validate range offsets and lengths. 884 // Since mData is never referenced for secure input, it is used to store 885 // either the pointer to the secure buffer, or the opaque handle as on 886 // some devices ptr is actually an opaque handle, not a pointer. 
                    // TRICKY2: use native handle as the base of the ABuffer if received one,
                    // because Widevine source only receives these base addresses.
                    const native_handle_t *native_handle_ptr =
                        native_handle == NULL ? NULL : native_handle->handle();
                    info.mData = new ABuffer(
                            ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize);
                    info.mNativeHandle = native_handle;
                    info.mCodecData = info.mData;
                } else if (mQuirks & requiresAllocateBufferBit) {
                    // Component quirk: it must allocate its own buffer, backed by
                    // our shared memory for marshalling.
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID, allottedSize);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
                }

                if (mem != NULL) {
                    // The codec-facing buffer always maps the shared memory.
                    info.mCodecData = new ABuffer(mem->pointer(), bufSize);
                    info.mCodecRef = mem;

                    // Initialize the fence to "none" so a stale fd is never closed/waited on.
                    if (type == kMetadataBufferTypeANWBuffer) {
                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                    }

                    // if we require conversion, allocate conversion buffer for client use;
                    // otherwise, reuse codec buffer
                    if (mConverter[portIndex] != NULL) {
                        CHECK_GT(conversionBufferSize, (size_t)0);
                        mem = mDealer[portIndex]->allocate(conversionBufferSize);
                        if (mem == NULL|| mem->pointer() == NULL) {
                            return NO_MEMORY;
                        }
                        info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
                        info.mMemRef = mem;
                    } else {
                        info.mData = info.mCodecData;
                        info.mMemRef = info.mCodecRef;
                    }
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    // Publish the full buffer list for this port to the client via a
    // kWhatBuffersAllocated notification.
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Reads the output port definition and the component's requested graphic-buffer
// usage from OMX, merges in protected/video usage bits, and applies size,
// format, rotation and usage to |nativeWindow|.  |finalUsage| receives the
// combined gralloc usage bits.  |reconnect| is forwarded to
// setNativeWindowSizeFormatAndUsage.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */,
        bool reconnect) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    // Remember what the component asked for, for the log line below.
    int omxUsage = usage;

    // Secure playback requires buffers the CPU cannot map.
    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    // Reset cached window state; it is re-derived for the new configuration.
    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage,
            reconnect);
}

// Negotiates output buffer counts between the OMX component and the native
// window: configures the window, queries the consumer's minimum undequeued
// buffer count, then raises the component's nBufferCountActual — trying up to
// three extra buffers and backing off one at a time if the component refuses.
// On success fills in |bufferCount|, |bufferSize| and |minUndequeuedBuffers|
// (the latter adjusted upward by the extra buffers actually granted).
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            // The extras we secured count toward buffers the window may hold.
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Allocates output buffers by dequeuing graphic buffers from the native window
// and registering each with the OMX component (useGraphicBuffer).  Afterwards,
// the consumer's required minimum of undequeued buffers is cancelled back to
// the window (or, on error, everything we still own is cancelled).
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    // Allow the BufferQueue to allocate while we pre-register buffers;
    // disabled again at the end of this function.
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        // mData is only a capacity placeholder here; pixels live in the
        // graphic buffer itself.
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mCodecData = info.mData;
        info.mGraphicBuffer = graphicBuffer;
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // Preserve the first error encountered.
            if (err == 0) {
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}

// Allocates small shared-memory metadata buffers (VideoNativeMetadata or
// VideoGrallocMetadata, depending on mOutputMetadataType) for the output port.
// Actual graphic buffers are dequeued lazily at runtime; under the legacy
// adaptive experiment they are additionally preallocated and preregistered
// here by dequeuing and immediately cancelling them.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers,
            mLegacyAdaptiveExperiment /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        // Initialize the fence to "none" so a stale fd is never acted upon.
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new ABuffer(mem->pointer(), mem->size());
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        // we use useBuffer for metadata regardless of quirks
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMX->updateGraphicBufferInMeta(
                    mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                // Preserve the first error encountered.
                if (err == OK) {
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // Buffers kept undequeued by the consumer are not submitted to the codec.
    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Dequeues one graphic buffer from the native window and hands its metadata
// buffer to the component via fillBuffer, transferring the write fence fd.
// No-op once mMetadataBuffersToSubmit reaches zero.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());
    if (mMetadataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetadataBuffersToSubmit;
    info->checkWriteFence("submitOutputMetadataBuffer");
    // fillBuffer takes over the fence fd; clear ours so it isn't reused.
    status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
    info->mFenceFd = -1;
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    return err;
}

// Waits (with timeout) on a sync fence fd; a negative fd means "no fence" and
// succeeds immediately.  |dbg| tags the warning log on timeout.  Note: the
// Fence object takes ownership of |fd|.
status_t ACodec::waitForFence(int fd, const char *dbg ) {
    status_t res = OK;
    if (fd >= 0) {
        sp<Fence> fence = new Fence(fd);
        res = fence->wait(IOMX::kFenceTimeoutMs);
        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    }
    return res;
}

// static
// Maps a BufferInfo ownership state to a short tag for log output.
const char *ACodec::_asString(BufferInfo::Status s) {
    switch (s) {
        case BufferInfo::OWNED_BY_US: return "OUR";
        case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
        case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
        case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
        case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
        default: return "?";
    }
}

// Debugging aid: logs ID, graphic buffer pointers, ownership state and
// dequeue generation for every buffer on the given port.
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                i, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
    }
}

// Returns a buffer we currently own to the native window via cancelBuffer,
// passing along (and relinquishing) its write fence.  Ownership is marked as
// OWNED_BY_NATIVE_WINDOW even if cancelBuffer fails, so our bookkeeping never
// double-owns a buffer.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    // cancelBuffer takes over the fence fd; clear ours so it isn't reused.
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Registers a freshly dequeued buffer with the render tracker (keyed by its
// slot index within mBuffers[kPortIndexOutput]) and flushes any frames whose
// fences have already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Callback when a frame was rendered; dumps the render queue if the tracker
// could not match the frame.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}

// Collects frames whose render fences have signaled (up to and including
// |until|, if given), detaches their render info from our buffer table, and
// posts a kWhatOutputFramesRendered notification if there is anything to
// report.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}

// Dequeues a buffer from the native window and returns the matching
// BufferInfo marked OWNED_BY_US.  Stale buffers (already dequeued by us, or
// never registered in non-meta mode) are discarded and the dequeue retried.
// In metadata mode an unrecognized buffer replaces the least-recently
// dequeued buffer we believe the window still holds.  Returns NULL in
// tunneled mode, after a fatal error, or if dequeueBuffer fails.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
              " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used.  We can simply ignore this as we have already dequeued
                // this buffer properly.  NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible to dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    // Point the slot's metadata buffer at the replacement graphic buffer.
    mOMX->updateGraphicBufferInMeta(
            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
            oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer on the given port (iterating backwards since freeBuffer
// removes entries) and releases the port's memory dealer.  Returns the first
// error encountered, but keeps going regardless.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}

// Frees only those output buffers that are neither held by the component nor
// being drained downstream; the rest must be returned before they can be
// freed.  Returns the first error encountered.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t err = OK;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            status_t err2 = freeBuffer(kPortIndexOutput, i);
            if (err == OK) {
                err = err2;
            }
        }
    }

    return err;
}

// Frees the i-th buffer on a port: cancels it back to the native window if we
// own it, frees it with the OMX component, closes any pending fence fd,
// untracks its render info, and removes it from our table.  The table entry
// is removed even if the OMX free fails.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMX->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}

// Linear search for the buffer with the given OMX buffer ID on a port.
// Optionally reports the slot index via |index|.  Returns NULL (and logs) if
// not found.
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info =
            &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    ALOGE("Could not find buffer with ID %u", bufferID);
    return NULL;
}

// Looks up the standard OMX component role for |mime| and applies it to our
// component node.  Returns BAD_VALUE for an unknown mime type.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    const char *role = getComponentRole(isEncoder, mime);
    if (role == NULL) {
        return BAD_VALUE;
    }
    status_t err = setComponentRole(mOMX, mNode, role);
    if (err != OK) {
        ALOGW("[%s] Failed to set standard component role '%s'.",
                mComponentName.c_str(), role);
    }
    return err;
}

//static
// Maps a mime type to the standard OMX role string ("audio_decoder.mp3",
// "video_encoder.avc", ...).  Returns NULL for mime types without a standard
// role.  Mime comparison is case-insensitive.
const char *ACodec::getComponentRole(
        bool isEncoder, const char *mime) {
    struct MimeToRole {
        const char *mime;
        const char *decoderRole;
        const char *encoderRole;
    };

    static const MimeToRole kMimeToRole[] = {
        { MEDIA_MIMETYPE_AUDIO_MPEG,
            "audio_decoder.mp3", "audio_encoder.mp3" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
            "audio_decoder.mp1", "audio_encoder.mp1" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
            "audio_decoder.mp2", "audio_encoder.mp2" },
        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
            "audio_decoder.amrnb", "audio_encoder.amrnb" },
        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
            "audio_decoder.amrwb", "audio_encoder.amrwb" },
        { MEDIA_MIMETYPE_AUDIO_AAC,
            "audio_decoder.aac", "audio_encoder.aac" },
        { MEDIA_MIMETYPE_AUDIO_VORBIS,
            "audio_decoder.vorbis", "audio_encoder.vorbis" },
        { MEDIA_MIMETYPE_AUDIO_OPUS,
            "audio_decoder.opus", "audio_encoder.opus" },
        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
        { MEDIA_MIMETYPE_VIDEO_AVC,
            "video_decoder.avc", "video_encoder.avc" },
        { MEDIA_MIMETYPE_VIDEO_HEVC,
            "video_decoder.hevc", "video_encoder.hevc" },
        { MEDIA_MIMETYPE_VIDEO_MPEG4,
            "video_decoder.mpeg4", "video_encoder.mpeg4" },
        { MEDIA_MIMETYPE_VIDEO_H263,
            "video_decoder.h263", "video_encoder.h263" },
        { MEDIA_MIMETYPE_VIDEO_VP8,
            "video_decoder.vp8", "video_encoder.vp8" },
        { MEDIA_MIMETYPE_VIDEO_VP9,
            "video_decoder.vp9", "video_encoder.vp9" },
        { MEDIA_MIMETYPE_AUDIO_RAW,
            "audio_decoder.raw", "audio_encoder.raw" },
        { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
            "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
        { MEDIA_MIMETYPE_AUDIO_FLAC,
            "audio_decoder.flac", "audio_encoder.flac" },
        { MEDIA_MIMETYPE_AUDIO_MSGSM,
            "audio_decoder.gsm", "audio_encoder.gsm" },
        { MEDIA_MIMETYPE_VIDEO_MPEG2,
            "video_decoder.mpeg2", "video_encoder.mpeg2" },
        { MEDIA_MIMETYPE_AUDIO_AC3,
            "audio_decoder.ac3", "audio_encoder.ac3" },
        { MEDIA_MIMETYPE_AUDIO_EAC3,
            "audio_decoder.eac3", "audio_encoder.eac3" },
    };

    static const size_t kNumMimeToRole =
        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);

    size_t i;
    for (i = 0; i < kNumMimeToRole; ++i) {
        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
            break;
        }
    }

    if (i == kNumMimeToRole) {
        return NULL;
    }

    return isEncoder ? kMimeToRole[i].encoderRole
                     : kMimeToRole[i].decoderRole;
}

//static
// Sets OMX_IndexParamStandardComponentRole on |node|.  The role string is
// truncated to OMX_MAX_STRINGNAME_SIZE - 1 and always NUL-terminated.
status_t ACodec::setComponentRole(
        const sp<IOMX> &omx, IOMX::node_id node, const char *role) {
    OMX_PARAM_COMPONENTROLETYPE roleParams;
    InitOMXParams(&roleParams);

    strncpy((char *)roleParams.cRole,
            role, OMX_MAX_STRINGNAME_SIZE - 1);

    // strncpy does not guarantee termination when truncating; force it.
    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';

    return omx->setParameter(
            node, OMX_IndexParamStandardComponentRole,
            &roleParams, sizeof(roleParams));
}

// Configures the component from the client-supplied format message |msg|:
// role, bitrate, metadata modes, native-window setup, etc.
// (This function continues past the end of this chunk.)
status_t ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    int32_t encoder;
    if (!msg->findInt32("encoder", &encoder)) {
        encoder = false;
    }

    sp<AMessage> inputFormat = new AMessage;
    sp<AMessage> outputFormat = new AMessage;
    mConfigFormat = msg;

    mIsEncoder = encoder;

    mInputMetadataType = kMetadataBufferTypeInvalid;
    mOutputMetadataType = kMetadataBufferTypeInvalid;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);

    if (err != OK) {
        return err;
    }

    int32_t bitRate = 0;
    // FLAC encoder doesn't need a bitrate, other encoders do
    if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
            && !msg->findInt32("bitrate", &bitRate)) {
        return INVALID_OPERATION;
    }

    // propagate bitrate to the output so that the muxer has it
    if (encoder && msg->findInt32("bitrate", &bitRate)) {
        // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
        // average bitrate. We've been setting both bitrate and max-bitrate to this same value.
1748 outputFormat->setInt32("bitrate", bitRate); 1749 outputFormat->setInt32("max-bitrate", bitRate); 1750 } 1751 1752 int32_t storeMeta; 1753 if (encoder 1754 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1755 && storeMeta != kMetadataBufferTypeInvalid) { 1756 mInputMetadataType = (MetadataBufferType)storeMeta; 1757 err = mOMX->storeMetaDataInBuffers( 1758 mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1759 if (err != OK) { 1760 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1761 mComponentName.c_str(), err); 1762 1763 return err; 1764 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1765 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1766 // IOMX translates ANWBuffers to gralloc source already. 1767 mInputMetadataType = (MetadataBufferType)storeMeta; 1768 } 1769 1770 uint32_t usageBits; 1771 if (mOMX->getParameter( 1772 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1773 &usageBits, sizeof(usageBits)) == OK) { 1774 inputFormat->setInt32( 1775 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1776 } 1777 } 1778 1779 int32_t prependSPSPPS = 0; 1780 if (encoder 1781 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1782 && prependSPSPPS != 0) { 1783 OMX_INDEXTYPE index; 1784 err = mOMX->getExtensionIndex( 1785 mNode, 1786 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1787 &index); 1788 1789 if (err == OK) { 1790 PrependSPSPPSToIDRFramesParams params; 1791 InitOMXParams(¶ms); 1792 params.bEnable = OMX_TRUE; 1793 1794 err = mOMX->setParameter( 1795 mNode, index, ¶ms, sizeof(params)); 1796 } 1797 1798 if (err != OK) { 1799 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1800 "IDR frames. 
(err %d)", err); 1801 1802 return err; 1803 } 1804 } 1805 1806 // Only enable metadata mode on encoder output if encoder can prepend 1807 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1808 // opaque handle, to which we don't have access. 1809 int32_t video = !strncasecmp(mime, "video/", 6); 1810 mIsVideo = video; 1811 if (encoder && video) { 1812 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1813 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1814 && storeMeta != 0); 1815 1816 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1817 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1818 if (err != OK) { 1819 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1820 mComponentName.c_str(), err); 1821 } 1822 1823 if (!msg->findInt64( 1824 "repeat-previous-frame-after", 1825 &mRepeatFrameDelayUs)) { 1826 mRepeatFrameDelayUs = -1ll; 1827 } 1828 1829 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1830 mMaxPtsGapUs = -1ll; 1831 } 1832 1833 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1834 mMaxFps = -1; 1835 } 1836 1837 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1838 mTimePerCaptureUs = -1ll; 1839 } 1840 1841 if (!msg->findInt32( 1842 "create-input-buffers-suspended", 1843 (int32_t*)&mCreateInputBuffersSuspended)) { 1844 mCreateInputBuffersSuspended = false; 1845 } 1846 } 1847 1848 // NOTE: we only use native window for video decoders 1849 sp<RefBase> obj; 1850 bool haveNativeWindow = msg->findObject("native-window", &obj) 1851 && obj != NULL && video && !encoder; 1852 mUsingNativeWindow = haveNativeWindow; 1853 mLegacyAdaptiveExperiment = false; 1854 if (video && !encoder) { 1855 inputFormat->setInt32("adaptive-playback", false); 1856 1857 int32_t usageProtected; 1858 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1859 if (!haveNativeWindow) { 1860 ALOGE("protected output buffers must be sent to an 
ANativeWindow"); 1861 return PERMISSION_DENIED; 1862 } 1863 mFlags |= kFlagIsGrallocUsageProtected; 1864 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1865 } 1866 1867 if (mFlags & kFlagIsSecure) { 1868 // use native_handles for secure input buffers 1869 err = mOMX->enableNativeBuffers( 1870 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1871 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1872 err = OK; // ignore error for now 1873 } 1874 } 1875 if (haveNativeWindow) { 1876 sp<ANativeWindow> nativeWindow = 1877 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1878 1879 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1880 int32_t autoFrc; 1881 if (msg->findInt32("auto-frc", &autoFrc)) { 1882 bool enabled = autoFrc; 1883 OMX_CONFIG_BOOLEANTYPE config; 1884 InitOMXParams(&config); 1885 config.bEnabled = (OMX_BOOL)enabled; 1886 status_t temp = mOMX->setConfig( 1887 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1888 &config, sizeof(config)); 1889 if (temp == OK) { 1890 outputFormat->setInt32("auto-frc", enabled); 1891 } else if (enabled) { 1892 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1893 } 1894 } 1895 // END of temporary support for automatic FRC 1896 1897 int32_t tunneled; 1898 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1899 tunneled != 0) { 1900 ALOGI("Configuring TUNNELED video playback."); 1901 mTunneled = true; 1902 1903 int32_t audioHwSync = 0; 1904 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1905 ALOGW("No Audio HW Sync provided for video tunnel"); 1906 } 1907 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1908 if (err != OK) { 1909 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1910 audioHwSync, nativeWindow.get()); 1911 return err; 1912 } 1913 1914 int32_t maxWidth = 0, maxHeight = 0; 1915 if (msg->findInt32("max-width", &maxWidth) && 1916 msg->findInt32("max-height", &maxHeight)) { 1917 
1918 err = mOMX->prepareForAdaptivePlayback( 1919 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1920 if (err != OK) { 1921 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1922 mComponentName.c_str(), err); 1923 // allow failure 1924 err = OK; 1925 } else { 1926 inputFormat->setInt32("max-width", maxWidth); 1927 inputFormat->setInt32("max-height", maxHeight); 1928 inputFormat->setInt32("adaptive-playback", true); 1929 } 1930 } 1931 } else { 1932 ALOGV("Configuring CPU controlled video playback."); 1933 mTunneled = false; 1934 1935 // Explicity reset the sideband handle of the window for 1936 // non-tunneled video in case the window was previously used 1937 // for a tunneled video playback. 1938 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1939 if (err != OK) { 1940 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1941 return err; 1942 } 1943 1944 // Always try to enable dynamic output buffers on native surface 1945 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1946 err = mOMX->storeMetaDataInBuffers( 1947 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1948 if (err != OK) { 1949 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1950 mComponentName.c_str(), err); 1951 1952 // if adaptive playback has been requested, try JB fallback 1953 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1954 // LARGE MEMORY REQUIREMENT 1955 1956 // we will not do adaptive playback on software accessed 1957 // surfaces as they never had to respond to changes in the 1958 // crop window, and we don't trust that they will be able to. 
1959 int usageBits = 0; 1960 bool canDoAdaptivePlayback; 1961 1962 if (nativeWindow->query( 1963 nativeWindow.get(), 1964 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1965 &usageBits) != OK) { 1966 canDoAdaptivePlayback = false; 1967 } else { 1968 canDoAdaptivePlayback = 1969 (usageBits & 1970 (GRALLOC_USAGE_SW_READ_MASK | 1971 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1972 } 1973 1974 int32_t maxWidth = 0, maxHeight = 0; 1975 if (canDoAdaptivePlayback && 1976 msg->findInt32("max-width", &maxWidth) && 1977 msg->findInt32("max-height", &maxHeight)) { 1978 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1979 mComponentName.c_str(), maxWidth, maxHeight); 1980 1981 err = mOMX->prepareForAdaptivePlayback( 1982 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1983 maxHeight); 1984 ALOGW_IF(err != OK, 1985 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1986 mComponentName.c_str(), err); 1987 1988 if (err == OK) { 1989 inputFormat->setInt32("max-width", maxWidth); 1990 inputFormat->setInt32("max-height", maxHeight); 1991 inputFormat->setInt32("adaptive-playback", true); 1992 } 1993 } 1994 // allow failure 1995 err = OK; 1996 } else { 1997 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1998 mComponentName.c_str()); 1999 CHECK(storingMetadataInDecodedBuffers()); 2000 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 2001 "legacy-adaptive", !msg->contains("no-experiments")); 2002 2003 inputFormat->setInt32("adaptive-playback", true); 2004 } 2005 2006 int32_t push; 2007 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 2008 && push != 0) { 2009 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 2010 } 2011 } 2012 2013 int32_t rotationDegrees; 2014 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 2015 mRotationDegrees = rotationDegrees; 2016 } else { 2017 mRotationDegrees = 0; 2018 } 2019 } 2020 2021 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 2022 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 2023 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 2024 2025 if (video) { 2026 // determine need for software renderer 2027 bool usingSwRenderer = false; 2028 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 2029 usingSwRenderer = true; 2030 haveNativeWindow = false; 2031 } 2032 2033 if (encoder) { 2034 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2035 } else { 2036 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2037 } 2038 2039 if (err != OK) { 2040 return err; 2041 } 2042 2043 if (haveNativeWindow) { 2044 mNativeWindow = static_cast<Surface *>(obj.get()); 2045 } 2046 2047 // initialize native window now to get actual output format 2048 // TODO: this is needed for some encoders even though they don't use native window 2049 err = initNativeWindow(); 2050 if (err != OK) { 2051 return err; 2052 } 2053 2054 // fallback for devices that do not handle flex-YUV for native buffers 2055 if (haveNativeWindow) { 2056 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2057 if (msg->findInt32("color-format", &requestedColorFormat) && 2058 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2059 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2060 if (err != OK) { 2061 return err; 2062 } 2063 int32_t colorFormat = OMX_COLOR_FormatUnused; 2064 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2065 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2066 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2067 return BAD_VALUE; 2068 } 2069 ALOGD("[%s] Requested output format %#x and got %#x.", 2070 mComponentName.c_str(), requestedColorFormat, colorFormat); 2071 if (!isFlexibleColorFormat( 2072 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2073 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2074 // device did not handle flex-YUV request for native window, fall back 2075 // to SW renderer 2076 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2077 mNativeWindow.clear(); 2078 mNativeWindowUsageBits = 0; 2079 haveNativeWindow = false; 2080 usingSwRenderer = true; 2081 if (storingMetadataInDecodedBuffers()) { 2082 err = mOMX->storeMetaDataInBuffers( 2083 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2084 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2085 // TODO: implement adaptive-playback support for bytebuffer mode. 2086 // This is done by SW codecs, but most HW codecs don't support it. 2087 inputFormat->setInt32("adaptive-playback", false); 2088 } 2089 if (err == OK) { 2090 err = mOMX->enableNativeBuffers( 2091 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2092 } 2093 if (mFlags & kFlagIsGrallocUsageProtected) { 2094 // fallback is not supported for protected playback 2095 err = PERMISSION_DENIED; 2096 } else if (err == OK) { 2097 err = setupVideoDecoder( 2098 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2099 } 2100 } 2101 } 2102 } 2103 2104 if (usingSwRenderer) { 2105 outputFormat->setInt32("using-sw-renderer", 1); 2106 } 2107 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2108 int32_t numChannels, sampleRate; 2109 if (!msg->findInt32("channel-count", &numChannels) 2110 || !msg->findInt32("sample-rate", &sampleRate)) { 2111 // Since we did not always check for these, leave them optional 2112 // and have the decoder figure it all out. 2113 err = OK; 2114 } else { 2115 err = setupRawAudioFormat( 2116 encoder ? 
kPortIndexInput : kPortIndexOutput, 2117 sampleRate, 2118 numChannels); 2119 } 2120 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2121 int32_t numChannels, sampleRate; 2122 if (!msg->findInt32("channel-count", &numChannels) 2123 || !msg->findInt32("sample-rate", &sampleRate)) { 2124 err = INVALID_OPERATION; 2125 } else { 2126 int32_t isADTS, aacProfile; 2127 int32_t sbrMode; 2128 int32_t maxOutputChannelCount; 2129 int32_t pcmLimiterEnable; 2130 drcParams_t drc; 2131 if (!msg->findInt32("is-adts", &isADTS)) { 2132 isADTS = 0; 2133 } 2134 if (!msg->findInt32("aac-profile", &aacProfile)) { 2135 aacProfile = OMX_AUDIO_AACObjectNull; 2136 } 2137 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2138 sbrMode = -1; 2139 } 2140 2141 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2142 maxOutputChannelCount = -1; 2143 } 2144 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2145 // value is unknown 2146 pcmLimiterEnable = -1; 2147 } 2148 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2149 // value is unknown 2150 drc.encodedTargetLevel = -1; 2151 } 2152 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2153 // value is unknown 2154 drc.drcCut = -1; 2155 } 2156 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2157 // value is unknown 2158 drc.drcBoost = -1; 2159 } 2160 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2161 // value is unknown 2162 drc.heavyCompression = -1; 2163 } 2164 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2165 // value is unknown 2166 drc.targetRefLevel = -1; 2167 } 2168 2169 err = setupAACCodec( 2170 encoder, numChannels, sampleRate, bitRate, aacProfile, 2171 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2172 pcmLimiterEnable); 2173 } 2174 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2175 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2176 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2177 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2178 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2179 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2180 // These are PCM-like formats with a fixed sample rate but 2181 // a variable number of channels. 2182 2183 int32_t numChannels; 2184 if (!msg->findInt32("channel-count", &numChannels)) { 2185 err = INVALID_OPERATION; 2186 } else { 2187 int32_t sampleRate; 2188 if (!msg->findInt32("sample-rate", &sampleRate)) { 2189 sampleRate = 8000; 2190 } 2191 err = setupG711Codec(encoder, sampleRate, numChannels); 2192 } 2193 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2194 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2195 if (encoder && 2196 (!msg->findInt32("channel-count", &numChannels) 2197 || !msg->findInt32("sample-rate", &sampleRate))) { 2198 ALOGE("missing channel count or sample rate for FLAC encoder"); 2199 err = INVALID_OPERATION; 2200 } else { 2201 if (encoder) { 2202 if (!msg->findInt32( 2203 "complexity", &compressionLevel) && 2204 !msg->findInt32( 2205 "flac-compression-level", &compressionLevel)) { 2206 compressionLevel = 5; // default FLAC compression level 2207 } else if (compressionLevel < 0) { 2208 ALOGW("compression level %d outside [0..8] range, " 2209 "using 0", 2210 compressionLevel); 2211 compressionLevel = 0; 2212 } else if (compressionLevel > 8) { 2213 ALOGW("compression level %d outside [0..8] range, " 2214 "using 8", 2215 compressionLevel); 2216 compressionLevel = 8; 2217 } 2218 } 2219 err = setupFlacCodec( 2220 encoder, numChannels, sampleRate, compressionLevel); 2221 } 2222 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2223 int32_t numChannels, sampleRate; 2224 if (encoder 2225 || !msg->findInt32("channel-count", &numChannels) 2226 || !msg->findInt32("sample-rate", &sampleRate)) { 2227 err = INVALID_OPERATION; 2228 } else { 2229 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2230 } 2231 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2232 int32_t numChannels; 2233 int32_t sampleRate; 2234 if (!msg->findInt32("channel-count", &numChannels) 2235 || !msg->findInt32("sample-rate", &sampleRate)) { 2236 err = INVALID_OPERATION; 2237 } else { 2238 err = setupAC3Codec(encoder, numChannels, sampleRate); 2239 } 2240 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2241 int32_t numChannels; 2242 int32_t sampleRate; 2243 if (!msg->findInt32("channel-count", &numChannels) 2244 || !msg->findInt32("sample-rate", &sampleRate)) { 2245 err = INVALID_OPERATION; 2246 } else { 2247 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2248 } 2249 } 2250 2251 if (err != OK) { 2252 return err; 2253 } 2254 2255 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2256 mEncoderDelay = 0; 2257 } 2258 2259 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2260 mEncoderPadding = 0; 2261 } 2262 2263 if (msg->findInt32("channel-mask", &mChannelMask)) { 2264 mChannelMaskPresent = true; 2265 } else { 2266 mChannelMaskPresent = false; 2267 } 2268 2269 int32_t maxInputSize; 2270 if (msg->findInt32("max-input-size", &maxInputSize)) { 2271 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2272 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2273 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2274 } 2275 2276 int32_t priority; 2277 if (msg->findInt32("priority", &priority)) { 2278 err = setPriority(priority); 2279 } 2280 2281 int32_t rateInt = -1; 2282 float rateFloat = -1; 2283 if (!msg->findFloat("operating-rate", &rateFloat)) { 2284 msg->findInt32("operating-rate", &rateInt); 2285 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
    }
    if (rateFloat > 0) {
        err = setOperatingRate(rateFloat, video);
    }

    // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
    mBaseOutputFormat = outputFormat;
    // trigger a kWhatOutputFormatChanged msg on first buffer
    mLastOutputFormat.clear();

    err = getPortFormat(kPortIndexInput, inputFormat);
    if (err == OK) {
        err = getPortFormat(kPortIndexOutput, outputFormat);
        if (err == OK) {
            // only latch the formats once both ports were queried successfully
            mInputFormat = inputFormat;
            mOutputFormat = outputFormat;
        }
    }

    // create data converters if needed
    if (!video && err == OK) {
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        if (encoder) {
            // convert from the client's PCM encoding to the codec's on the input port
            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
            if (mConverter[kPortIndexInput] != NULL) {
                // advertise the client-facing encoding, not the codec's
                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        } else {
            // convert from the codec's PCM encoding to the client's on the output port
            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
            if (mConverter[kPortIndexOutput] != NULL) {
                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        }
    }

    return err;
}

// Applies the client-requested codec priority via the Android
// OMX_IndexConfigPriority config. A component that does not support the
// config only causes an informational log -- the failure is not propagated.
status_t ACodec::setPriority(int32_t priority) {
    if (priority < 0) {
        return BAD_VALUE;
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = (OMX_U32)priority;
    status_t temp = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
            &config, sizeof(config));
    if (temp != OK) {
        ALOGI("codec does not support config priority (err %d)", temp);
    }
    return OK;
}

// Applies the client-requested operating rate via the Android
// OMX_IndexConfigOperatingRate config. For video the value is sent as a
// Q16 fixed-point number (hence the 65535 cap before scaling by 65536);
// otherwise the float is truncated to a plain integer. As with
// setPriority, an unsupported config is logged and ignored.
status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
    if (rateFloat < 0) {
        return BAD_VALUE;
    }
    OMX_U32 rate;
    if (isVideo) {
        if (rateFloat > 65535) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
    } else {
        if (rateFloat > UINT_MAX) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat);
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = rate;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
            &config, sizeof(config));
    if (err != OK) {
        ALOGI("codec does not support config operating rate (err %d)", err);
    }
    return OK;
}

// Reads back the intra-refresh period (in frames) from the component.
// Tries the Android-specific config first; on failure falls back to the
// standard OMX cyclic intra-refresh parameter and converts its macroblock
// count into a period using the output port's frame dimensions.
// Reports 0 when intra refresh is unsupported or disabled.
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);

    return OK;
}

// Requests an intra-refresh period of |intraRefreshPeriod| frames.
// Prefers the Android-specific config; if the component rejects it and we
// are still inside configure(), falls back to the standard cyclic
// intra-refresh parameter.
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
    if (!inConfigure) {
        // setParameter is only legal while configuring; give up.
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}

// Raises the port's buffer size to at least |size| bytes.  A no-op when
// nBufferSize is already large enough; otherwise sets the new size and
// reads it back, since some components silently ignore the request.
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize >= size) {
        return OK;
    }

    def.nBufferSize = size;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // verify the component actually honored the new size
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize < size) {
        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
        return FAILED_TRANSACTION;
    }

    return OK;
}

// Enumerates the port's supported audio encodings (up to
// kMaxIndicesToCheck entries) and selects |desiredFormat| if the
// component advertises it.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            break;
        }

        if (index == kMaxIndicesToCheck) {
            // enumerated the cap without a match -- treat as unsupported
            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eEncoding), format.eEncoding);
            return ERROR_UNSUPPORTED;
        }
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}

// Configures AAC on the component.
// Encoder path: raw PCM on the input port, AAC (MP4FF stream format) on
// the output port, with profile/SBR tools/bit rate applied.  ADTS output
// is not supported when encoding (-EINVAL).
// Decoder path: AAC on the input port (ADTS or MP4FF depending on
// |isADTS|) plus optional Android presentation/DRC parameters; negative
// values in |drc|, |maxOutputChannelCount| and |pcmLimiterEnable| mean
// "unspecified" (see the defaulting in configureCodec).
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        return -EINVAL;
    }

    // The PCM-facing port (input when encoding, output when decoding)
    // is configured as raw audio first.
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // Decoder: configure the AAC input port.
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    mSampleRate = sampleRate;
    return res;
}

// Configures an AC3 decoder: raw PCM on the output port and Android AC3
// parameters on the input port.  Encoding is rejected.
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ?
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("AC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));
}

// Configures an EAC3 decoder, mirroring setupAC3Codec but using the
// Android EAC3 parameter index.  Encoding is rejected.
status_t ACodec::setupEAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("EAC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));
}

// Maps a requested bit rate (bits/sec) to an AMR band mode by rounding
// up to the next standard AMR rate: WB0..WB8 for wideband, NB0..NB7 for
// narrowband, clamping to the highest mode for out-of-range requests.
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
        bool isAMRWB, int32_t bps) {
    if (isAMRWB) {
        if (bps <= 6600) {
            return OMX_AUDIO_AMRBandModeWB0;
        } else if (bps <= 8850) {
            return OMX_AUDIO_AMRBandModeWB1;
        } else if (bps <= 12650) {
            return OMX_AUDIO_AMRBandModeWB2;
        } else if (bps <= 14250) {
            return OMX_AUDIO_AMRBandModeWB3;
        } else if (bps <= 15850) {
            return OMX_AUDIO_AMRBandModeWB4;
        } else if (bps <= 18250) {
            return OMX_AUDIO_AMRBandModeWB5;
        } else if (bps <= 19850) {
            return OMX_AUDIO_AMRBandModeWB6;
        } else if (bps <= 23050) {
            return OMX_AUDIO_AMRBandModeWB7;
        }

        // 23850 bps
        return OMX_AUDIO_AMRBandModeWB8;
    } else {  // AMRNB
        if (bps <= 4750) {
            return OMX_AUDIO_AMRBandModeNB0;
        } else if (bps <= 5150) {
            return OMX_AUDIO_AMRBandModeNB1;
        } else if (bps <= 5900) {
            return OMX_AUDIO_AMRBandModeNB2;
        } else if (bps <= 6700) {
            return OMX_AUDIO_AMRBandModeNB3;
        } else if (bps <= 7400) {
            return OMX_AUDIO_AMRBandModeNB4;
        } else if (bps <= 7950) {
            return OMX_AUDIO_AMRBandModeNB5;
        } else if (bps <= 10200) {
            return OMX_AUDIO_AMRBandModeNB6;
        }

        // 12200 bps
        return OMX_AUDIO_AMRBandModeNB7;
    }
}

// Configures AMR narrowband/wideband on the compressed port (output when
// encoding, input when decoding): band mode from the requested bit rate,
// standard FSF frame format, then raw mono PCM (8 kHz NB / 16 kHz WB) on
// the PCM-facing port.
status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
    OMX_AUDIO_PARAM_AMRTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;

    status_t err =
        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            isWAMR ? 16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// G.711 (A-law/mu-law) decode only; the input port is configured as raw
// PCM (the caller defaults the sample rate to 8 kHz when unspecified).
status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
    if (encoder) {
        return INVALID_OPERATION;
    }

    return setupRawAudioFormat(
            kPortIndexInput, sampleRate, numChannels);
}

// Configures FLAC.  For an encoder the compression level (already
// clamped to [0..8] by the caller) is applied to the output port; both
// paths then set up raw PCM on the PCM-facing port.
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ?
kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures a port for raw PCM: sets the port's encoding to PCM, then
// fills in channel count, sample rate and the sample representation for
// the requested encoding (8-bit unsigned, 16-bit signed or 32-bit
// float).  If the codec rejects a non-16-bit encoding, retries with
// 16-bit signed; per the NOTE below, callers verify the final encoding
// via readback.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    // NOTE(review): when no channel mapping exists for this channel
    // count, this returns OMX_ErrorNone (== 0, i.e. success) WITHOUT
    // applying the PCM params -- looks like deliberate best-effort
    // behavior; confirm before changing.
    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err != OK && encoding != kAudioEncodingPcm16bit) {
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Enables tunneled (sideband) video playback: asks the component for a
// sideband handle tied to |audioHwSync|, then attaches that handle to
// the native window.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Enumerates the video port's supported formats (up to
// kMaxIndicesToCheck entries) and selects the one matching the requested
// compression and color format.  A flexible color format request is
// substituted by the codec's concrete equivalent, and the TI encoder's
// unreliable fields are special-cased.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
                && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
3045// 3046// Suggested preference order: 3047// - optimal format for texture rendering (mediaplayer behavior) 3048// - optimal SW readable & texture renderable format (flex-YUV support) 3049// - optimal SW readable non-renderable format (flex-YUV bytebuffer support) 3050// - legacy "usable" standard formats 3051// 3052// For legacy support, we prefer a standard format, but will settle for a SW readable 3053// flex-YUV format. 3054status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) { 3055 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat; 3056 InitOMXParams(&format); 3057 format.nPortIndex = kPortIndexOutput; 3058 3059 InitOMXParams(&legacyFormat); 3060 // this field will change when we find a suitable legacy format 3061 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused; 3062 3063 for (OMX_U32 index = 0; ; ++index) { 3064 format.nIndex = index; 3065 status_t err = mOMX->getParameter( 3066 mNode, OMX_IndexParamVideoPortFormat, 3067 &format, sizeof(format)); 3068 if (err != OK) { 3069 // no more formats, pick legacy format if found 3070 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) { 3071 memcpy(&format, &legacyFormat, sizeof(format)); 3072 break; 3073 } 3074 return err; 3075 } 3076 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) { 3077 return OMX_ErrorBadParameter; 3078 } 3079 if (!getLegacyFlexibleFormat) { 3080 break; 3081 } 3082 // standard formats that were exposed to users before 3083 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar 3084 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar 3085 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar 3086 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar 3087 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) { 3088 break; 3089 } 3090 // find best legacy non-standard format 3091 OMX_U32 flexibleEquivalent; 3092 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused 3093 && isFlexibleColorFormat( 3094 mOMX, mNode, 
format.eColorFormat, false /* usingNativeBuffers */, 3095 &flexibleEquivalent) 3096 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 3097 memcpy(&legacyFormat, &format, sizeof(format)); 3098 } 3099 } 3100 return mOMX->setParameter( 3101 mNode, OMX_IndexParamVideoPortFormat, 3102 &format, sizeof(format)); 3103} 3104 3105static const struct VideoCodingMapEntry { 3106 const char *mMime; 3107 OMX_VIDEO_CODINGTYPE mVideoCodingType; 3108} kVideoCodingMapEntry[] = { 3109 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 3110 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 3111 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 3112 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 3113 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 3114 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 3115 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 3116 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 3117}; 3118 3119static status_t GetVideoCodingTypeFromMime( 3120 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 3121 for (size_t i = 0; 3122 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3123 ++i) { 3124 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3125 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3126 return OK; 3127 } 3128 } 3129 3130 *codingType = OMX_VIDEO_CodingUnused; 3131 3132 return ERROR_UNSUPPORTED; 3133} 3134 3135static status_t GetMimeTypeForVideoCoding( 3136 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3137 for (size_t i = 0; 3138 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3139 ++i) { 3140 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3141 *mime = kVideoCodingMapEntry[i].mMime; 3142 return OK; 3143 } 3144 } 3145 3146 mime->clear(); 3147 3148 return ERROR_UNSUPPORTED; 3149} 3150 3151status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) { 3152 OMX_PARAM_PORTDEFINITIONTYPE def; 3153 InitOMXParams(&def); 3154 
def.nPortIndex = portIndex; 3155 status_t err; 3156 ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(), 3157 portIndex == kPortIndexInput ? "input" : "output", bufferNum); 3158 err = mOMX->getParameter( 3159 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3160 if (err != OK) { 3161 return err; 3162 } 3163 def.nBufferCountActual = bufferNum; 3164 err = mOMX->setParameter( 3165 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3166 if (err != OK) { 3167 // Component could reject this request. 3168 ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(), 3169 portIndex == kPortIndexInput ? "input" : "output", bufferNum); 3170 } 3171 return OK; 3172} 3173 3174status_t ACodec::setupVideoDecoder( 3175 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow, 3176 bool usingSwRenderer, sp<AMessage> &outputFormat) { 3177 int32_t width, height; 3178 if (!msg->findInt32("width", &width) 3179 || !msg->findInt32("height", &height)) { 3180 return INVALID_OPERATION; 3181 } 3182 3183 OMX_VIDEO_CODINGTYPE compressionFormat; 3184 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3185 3186 if (err != OK) { 3187 return err; 3188 } 3189 3190 if (compressionFormat == OMX_VIDEO_CodingVP9) { 3191 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 3192 InitOMXParams(¶ms); 3193 params.nPortIndex = kPortIndexInput; 3194 // Check if VP9 decoder advertises supported profiles. 
3195 params.nProfileIndex = 0; 3196 status_t err = mOMX->getParameter( 3197 mNode, 3198 OMX_IndexParamVideoProfileLevelQuerySupported, 3199 ¶ms, 3200 sizeof(params)); 3201 mIsLegacyVP9Decoder = err != OK; 3202 } 3203 3204 err = setVideoPortFormatType( 3205 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 3206 3207 if (err != OK) { 3208 return err; 3209 } 3210 3211 int32_t tmp; 3212 if (msg->findInt32("color-format", &tmp)) { 3213 OMX_COLOR_FORMATTYPE colorFormat = 3214 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3215 err = setVideoPortFormatType( 3216 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3217 if (err != OK) { 3218 ALOGW("[%s] does not support color format %d", 3219 mComponentName.c_str(), colorFormat); 3220 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3221 } 3222 } else { 3223 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3224 } 3225 3226 if (err != OK) { 3227 return err; 3228 } 3229 3230 // Set the component input buffer number to be |tmp|. If succeed, 3231 // component will set input port buffer number to be |tmp|. If fail, 3232 // component will keep the same buffer number as before. 3233 if (msg->findInt32("android._num-input-buffers", &tmp)) { 3234 err = setPortBufferNum(kPortIndexInput, tmp); 3235 if (err != OK) 3236 return err; 3237 } 3238 3239 // Set the component output buffer number to be |tmp|. If succeed, 3240 // component will set output port buffer number to be |tmp|. If fail, 3241 // component will keep the same buffer number as before. 
3242 if (msg->findInt32("android._num-output-buffers", &tmp)) { 3243 err = setPortBufferNum(kPortIndexOutput, tmp); 3244 if (err != OK) 3245 return err; 3246 } 3247 3248 int32_t frameRateInt; 3249 float frameRateFloat; 3250 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 3251 if (!msg->findInt32("frame-rate", &frameRateInt)) { 3252 frameRateInt = -1; 3253 } 3254 frameRateFloat = (float)frameRateInt; 3255 } 3256 3257 err = setVideoFormatOnPort( 3258 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 3259 3260 if (err != OK) { 3261 return err; 3262 } 3263 3264 err = setVideoFormatOnPort( 3265 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 3266 3267 if (err != OK) { 3268 return err; 3269 } 3270 3271 err = setColorAspectsForVideoDecoder( 3272 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat); 3273 if (err == ERROR_UNSUPPORTED) { // support is optional 3274 err = OK; 3275 } 3276 3277 if (err != OK) { 3278 return err; 3279 } 3280 3281 err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat); 3282 if (err == ERROR_UNSUPPORTED) { // support is optional 3283 err = OK; 3284 } 3285 return err; 3286} 3287 3288status_t ACodec::initDescribeColorAspectsIndex() { 3289 status_t err = mOMX->getExtensionIndex( 3290 mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex); 3291 if (err != OK) { 3292 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0; 3293 } 3294 return err; 3295} 3296 3297status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) { 3298 status_t err = ERROR_UNSUPPORTED; 3299 if (mDescribeColorAspectsIndex) { 3300 err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3301 } 3302 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3303 mComponentName.c_str(), 3304 params.sAspects.mRange, asString(params.sAspects.mRange), 3305 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3306 
params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3307 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3308 err, asString(err)); 3309 3310 if (verify && err == OK) { 3311 err = getCodecColorAspects(params); 3312 } 3313 3314 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3315 "[%s] setting color aspects failed even though codec advertises support", 3316 mComponentName.c_str()); 3317 return err; 3318} 3319 3320status_t ACodec::setColorAspectsForVideoDecoder( 3321 int32_t width, int32_t height, bool usingNativeWindow, 3322 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3323 DescribeColorAspectsParams params; 3324 InitOMXParams(¶ms); 3325 params.nPortIndex = kPortIndexOutput; 3326 3327 getColorAspectsFromFormat(configFormat, params.sAspects); 3328 if (usingNativeWindow) { 3329 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3330 // The default aspects will be set back to the output format during the 3331 // getFormat phase of configure(). Set non-Unspecified values back into the 3332 // format, in case component does not support this enumeration. 
3333 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3334 } 3335 3336 (void)initDescribeColorAspectsIndex(); 3337 3338 // communicate color aspects to codec 3339 return setCodecColorAspects(params); 3340} 3341 3342status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams ¶ms) { 3343 status_t err = ERROR_UNSUPPORTED; 3344 if (mDescribeColorAspectsIndex) { 3345 err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3346 } 3347 ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3348 mComponentName.c_str(), 3349 params.sAspects.mRange, asString(params.sAspects.mRange), 3350 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3351 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3352 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3353 err, asString(err)); 3354 if (params.bRequestingDataSpace) { 3355 ALOGV("for dataspace %#x", params.nDataSpace); 3356 } 3357 if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex 3358 && !params.bRequestingDataSpace && !params.bDataSpaceChanged) { 3359 ALOGW("[%s] getting color aspects failed even though codec advertises support", 3360 mComponentName.c_str()); 3361 } 3362 return err; 3363} 3364 3365status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) { 3366 DescribeColorAspectsParams params; 3367 InitOMXParams(¶ms); 3368 params.nPortIndex = kPortIndexInput; 3369 status_t err = getCodecColorAspects(params); 3370 if (err == OK) { 3371 // we only set encoder input aspects if codec supports them 3372 setColorAspectsIntoFormat(params.sAspects, format, true /* force */); 3373 } 3374 return err; 3375} 3376 3377status_t ACodec::getDataSpace( 3378 DescribeColorAspectsParams ¶ms, android_dataspace *dataSpace /* nonnull */, 3379 bool tryCodec) { 3380 status_t err = OK; 3381 if (tryCodec) { 3382 // request dataspace guidance from codec. 
3383 params.bRequestingDataSpace = OMX_TRUE; 3384 err = getCodecColorAspects(params); 3385 params.bRequestingDataSpace = OMX_FALSE; 3386 if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) { 3387 *dataSpace = (android_dataspace)params.nDataSpace; 3388 return err; 3389 } else if (err == ERROR_UNSUPPORTED) { 3390 // ignore not-implemented error for dataspace requests 3391 err = OK; 3392 } 3393 } 3394 3395 // this returns legacy versions if available 3396 *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */); 3397 ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 3398 "and dataspace %#x", 3399 mComponentName.c_str(), 3400 params.sAspects.mRange, asString(params.sAspects.mRange), 3401 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3402 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3403 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3404 *dataSpace); 3405 return err; 3406} 3407 3408 3409status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder( 3410 int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, 3411 android_dataspace *dataSpace) { 3412 DescribeColorAspectsParams params; 3413 InitOMXParams(¶ms); 3414 params.nPortIndex = kPortIndexOutput; 3415 3416 // reset default format and get resulting format 3417 getColorAspectsFromFormat(configFormat, params.sAspects); 3418 if (dataSpace != NULL) { 3419 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3420 } 3421 status_t err = setCodecColorAspects(params, true /* readBack */); 3422 3423 // we always set specified aspects for decoders 3424 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3425 3426 if (dataSpace != NULL) { 3427 status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */); 3428 if (err == OK) { 3429 err = res; 3430 } 3431 } 3432 3433 return err; 3434} 3435 3436// initial video encoder setup for bytebuffer mode 
status_t ACodec::setColorAspectsForVideoEncoder(
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    // copy config to output format as this is not exposed via getFormat
    copyColorConfig(configFormat, outputFormat);

    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    getColorAspectsFromFormat(configFormat, params.sAspects);

    (void)initDescribeColorAspectsIndex();

    int32_t usingRecorder;
    if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
        android_dataspace dataSpace = HAL_DATASPACE_BT709;
        int32_t width, height;
        if (configFormat->findInt32("width", &width)
                && configFormat->findInt32("height", &height)) {
            setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
            status_t err = getDataSpace(
                    params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
            if (err != OK) {
                return err;
            }
            setColorAspectsIntoFormat(params.sAspects, outputFormat);
        }
        inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
    }

    // communicate color aspects to codec, but do not allow change of the platform aspects
    ColorAspects origAspects = params.sAspects;
    // Give the codec two chances to settle: if it keeps altering the requested
    // aspects after that, the changed fields have been unspecified and we warn.
    for (int triesLeft = 2; --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK
                || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
                        params.sAspects, origAspects, true /* usePlatformAspects */)) {
            return err;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }
    return OK;
}

// Propagates HDR static info from the configure() format to |outputFormat|
// and communicates it to the codec on |portIndex|.
status_t ACodec::setHDRStaticInfoForVideoCodec(
        OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    HDRStaticInfo *info = &params.sInfo;
    if (getHDRStaticInfoFromFormat(configFormat, info)) {
        setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
    }

    (void)initDescribeHDRStaticInfoIndex();

    // communicate HDR static Info to codec
    return setHDRStaticInfo(params);
}

// subsequent initial video encoder setup for surface mode
status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
        android_dataspace *dataSpace /* nonnull */) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    ColorAspects &aspects = params.sAspects;

    // reset default format and store resulting format into both input and output formats
    getColorAspectsFromFormat(mConfigFormat, aspects);
    int32_t width, height;
    if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) {
        setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
    }
    setColorAspectsIntoFormat(aspects, mInputFormat);
    setColorAspectsIntoFormat(aspects, mOutputFormat);

    // communicate color aspects to codec, but do not allow any change
    ColorAspects origAspects = aspects;
    status_t err = OK;
    for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) {
        // NOTE(review): this inner |err| shadows the outer one declared above,
        // so a failure inside the loop never reaches the outer |err| that is
        // consulted below — confirm whether that is intentional.
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) {
            break;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }

    *dataSpace = HAL_DATASPACE_BT709;
    aspects = origAspects; // restore desired color aspects
    status_t res = getDataSpace(
            params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */);
    if (err == OK) {
        err = res;
    }
    mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
    mInputFormat->setBuffer(
            "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));

    // update input format with codec supported color aspects (basically set unsupported
    // aspects to Unspecified)
    if (err == OK) {
        (void)getInputColorAspectsForVideoEncoder(mInputFormat);
    }

    ALOGV("set default color aspects, updated input format to %s, output format to %s",
            mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());

    return err;
}

// Reads the codec's HDR static info on |portIndex| and, only on success,
// stores it into |format|.
status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    status_t err = getHDRStaticInfo(params);
    if (err == OK) {
        // we only set decodec output HDRStaticInfo if codec supports them
        setHDRStaticInfoIntoFormat(params.sInfo, format);
    }
    return err;
}

// Resolves the vendor extension index for the HDR-static-info API; leaves
// mDescribeHDRStaticInfoIndex at 0 when unsupported.
status_t ACodec::initDescribeHDRStaticInfoIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
    if (err != OK) {
        mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes |params|' HDR static info (SMPTE ST 2086-style mastering metadata
// plus content light levels) to the codec; ERROR_UNSUPPORTED if the extension
// index is absent.
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    const HDRStaticInfo *info = &params.sInfo;
    ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, "
          "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
            mComponentName.c_str(),
            info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y,
            info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
            info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
            info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] setting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Reads the codec's current HDR static info into |params|; ERROR_UNSUPPORTED
// if the extension index is absent.
status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] getting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Full input/output port setup for a video encoder: input color format and
// geometry, output coding type and bitrate, optional intra-refresh, per-codec
// parameter setup, then color aspects and HDR static info.
status_t ACodec::setupVideoEncoder(
        const char *mime, const sp<AMessage> &msg,
        sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    int32_t tmp;
    if (!msg->findInt32("color-format", &tmp)) {
        return INVALID_OPERATION;
    }

    OMX_COLOR_FORMATTYPE colorFormat =
        static_cast<OMX_COLOR_FORMATTYPE>(tmp);

    status_t err = setVideoPortFormatType(
            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);

    if (err != OK) {
        ALOGE("[%s] does not support color format %d",
              mComponentName.c_str(), colorFormat);

        return err;
    }

    /* Input port configuration */

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    int32_t width, height, bitrate;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)
            || !msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    int32_t stride;
    if (!msg->findInt32("stride", &stride)) {
        stride = width;
    }

    video_def->nStride = stride;

    int32_t sliceHeight;
    if (!msg->findInt32("slice-height", &sliceHeight)) {
        sliceHeight = height;
    }

    video_def->nSliceHeight = sliceHeight;

    // Buffer size sized for 4:2:0 (1.5 bytes per pixel of stride x slice).
    def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
        // NOTE(review): mTimePerFrameUs is only updated on this integer
        // fallback path, not when "frame-rate" arrives as a float — confirm
        // whether that asymmetry is intended.
        mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
    }

    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);  // Q16 fixed point
    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
    // this is redundant as it was already set up in setVideoPortFormatType
    // FIXME for now skip this only for flexible YUV formats
    if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
        video_def->eColorFormat = colorFormat;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set input port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    /* Output port configuration */

    OMX_VIDEO_CODINGTYPE compressionFormat;
    err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        ALOGE("[%s] does not support compression format %d",
              mComponentName.c_str(), compressionFormat);

        return err;
    }

    def.nPortIndex = kPortIndexOutput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;
    video_def->xFramerate = 0;
    video_def->nBitrate = bitrate;
    video_def->eCompressionFormat = compressionFormat;
    video_def->eColorFormat = OMX_COLOR_FormatUnused;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set output port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    int32_t intraRefreshPeriod = 0;
    if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod >= 0) {
        err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    // Codec-specific parameter setup for the selected output coding type.
    switch (compressionFormat) {
        case OMX_VIDEO_CodingMPEG4:
            err = setupMPEG4EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingH263:
            err = setupH263EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingAVC:
            err = setupAVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingHEVC:
            err = setupHEVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            err = setupVPXEncoderParameters(msg);
            break;

        default:
            break;
    }

    // Set up color aspects on input, but propagate them to the output format, as they will
    // not be read back from encoder.
    err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
    if (err == ERROR_UNSUPPORTED) {
        ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err == OK) {
        ALOGI("setupVideoEncoder succeeded");
    }

    return err;
}

// Configures cyclic and/or adaptive intra macroblock refresh on the output
// port from the "intra-refresh-*" format keys; |mode| selects the OMX refresh
// mode.
status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nCirMBs = mbs;
    }

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nAirMBs = mbs;

        int32_t ref;
        if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
            return INVALID_OPERATION;
        }
        params.nAirRef = ref;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoIntraRefresh,
            &params, sizeof(params));
    return err;
}

// Converts an I-frame interval (seconds) into a P-frame count per GOP:
// negative means "infinite GOP" (all-ones), zero means all-I.
// NOTE(review): frameRate * iFramesInterval is a 32-bit multiply and can
// overflow for large intervals — confirm the expected input range.
static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) {
    if (iFramesInterval < 0) {
        return 0xFFFFFFFF;
    } else if (iFramesInterval == 0) {
        return 0;
    }
    OMX_U32 ret = frameRate * iFramesInterval;
    return ret;
}

// Reads the "bitrate-mode" format key; defaults to variable bitrate when the
// key is absent.
static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
    int32_t tmp;
    if (!msg->findInt32("bitrate-mode", &tmp)) {
        return OMX_Video_ControlRateVariable;
    }

    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
}

// MPEG-4 encoder parameter setup: GOP structure, profile/level (validated
// against the component), bitrate mode and error correction.
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    if (mpeg4type.nPFrames == 0) {
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nBFrames = 0;
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// H.263 encoder parameter setup: GOP structure, profile/level (validated
// against the component), bitrate mode and error correction.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    if (h263type.nPFrames == 0) {
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    h263type.nBFrames = 0;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// static
// Picks the minimum AVC level whose limits (macroblock rate, frame size,
// dimension, bitrate) accommodate the given stream parameters. Bitrate is
// first normalized to baseline/main-profile kbps. Returns 0 when no level in
// the table fits.
int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
        int width, int height, int rate, int bitrate,
        OMX_VIDEO_AVCPROFILETYPE profile) {
    // convert bitrate to main/baseline profile kbps equivalent
    switch (profile) {
        case OMX_VIDEO_AVCProfileHigh10:
            bitrate = divUp(bitrate, 3000); break;
        case OMX_VIDEO_AVCProfileHigh:
            bitrate = divUp(bitrate, 1250); break;
        default:
            bitrate = divUp(bitrate, 1000); break;
    }

    // convert size and rate to MBs
    width = divUp(width, 16);
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;
    int maxDimension = max(width, height);

    // Per-level limits, ordered ascending so the first match is the minimum
    // sufficient level.
    static const int limits[][5] = {
        /*   MBps     MB   dim  bitrate        level */
        {    1485,    99,   28,     64, OMX_VIDEO_AVCLevel1  },
        {    1485,    99,   28,    128, OMX_VIDEO_AVCLevel1b },
        {    3000,   396,   56,    192, OMX_VIDEO_AVCLevel11 },
        {    6000,   396,   56,    384, OMX_VIDEO_AVCLevel12 },
        {   11880,   396,   56,    768, OMX_VIDEO_AVCLevel13 },
        {   11880,   396,   56,   2000, OMX_VIDEO_AVCLevel2  },
        {   19800,   792,   79,   4000, OMX_VIDEO_AVCLevel21 },
        {   20250,  1620,  113,   4000, OMX_VIDEO_AVCLevel22 },
        {   40500,  1620,  113,  10000, OMX_VIDEO_AVCLevel3  },
        {  108000,  3600,  169,  14000, OMX_VIDEO_AVCLevel31 },
        {  216000,  5120,  202,  20000, OMX_VIDEO_AVCLevel32 },
        {  245760,  8192,  256,  20000, OMX_VIDEO_AVCLevel4  },
        {  245760,  8192,  256,  50000, OMX_VIDEO_AVCLevel41 },
        {  522240,  8704,  263,  50000, OMX_VIDEO_AVCLevel42 },
        {  589824, 22080,  420, 135000, OMX_VIDEO_AVCLevel5  },
        {  983040, 36864,  543, 240000, OMX_VIDEO_AVCLevel51 },
        { 2073600, 36864,  543, 240000, OMX_VIDEO_AVCLevel52 },
    };

    for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
        const int (&limit)[5] = limits[i];
        if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2]
                && bitrate <= limit[3]) {
            return limit[4];
        }
    }
    return 0;
}

status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    status_t err = OK;
    int32_t intraRefreshMode = 0;
    if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) {
        err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode);
        if (err != OK) {
            // NOTE(review): the arguments appear swapped relative to the format
            // string — "mode (%d)" receives |err| and "0x%x" receives the mode.
            ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x",
                    err, intraRefreshMode);
            return err;
        }
    }

    OMX_VIDEO_PARAM_AVCTYPE h264type;
    InitOMXParams(&h264type);
    h264type.nPortIndex = kPortIndexOutput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));

    if (err != OK) {
        return err;
    }

    h264type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err =
verifySupportForProfileAndLevel(profile, level); 4122 4123 if (err != OK) { 4124 return err; 4125 } 4126 4127 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4128 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4129 } else { 4130 // Use baseline profile for AVC recording if profile is not specified. 4131 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4132 } 4133 4134 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4135 asString(h264type.eProfile), asString(h264type.eLevel)); 4136 4137 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4138 h264type.nSliceHeaderSpacing = 0; 4139 h264type.bUseHadamard = OMX_TRUE; 4140 h264type.nRefFrames = 1; 4141 h264type.nBFrames = 0; 4142 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4143 if (h264type.nPFrames == 0) { 4144 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4145 } 4146 h264type.nRefIdx10ActiveMinus1 = 0; 4147 h264type.nRefIdx11ActiveMinus1 = 0; 4148 h264type.bEntropyCodingCABAC = OMX_FALSE; 4149 h264type.bWeightedPPrediction = OMX_FALSE; 4150 h264type.bconstIpred = OMX_FALSE; 4151 h264type.bDirect8x8Inference = OMX_FALSE; 4152 h264type.bDirectSpatialTemporal = OMX_FALSE; 4153 h264type.nCabacInitIdc = 0; 4154 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4155 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4156 h264type.nSliceHeaderSpacing = 0; 4157 h264type.bUseHadamard = OMX_TRUE; 4158 h264type.nRefFrames = 2; 4159 h264type.nBFrames = 1; 4160 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4161 h264type.nAllowedPictureTypes = 4162 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4163 h264type.nRefIdx10ActiveMinus1 = 0; 4164 h264type.nRefIdx11ActiveMinus1 = 0; 4165 h264type.bEntropyCodingCABAC = OMX_TRUE; 4166 h264type.bWeightedPPrediction = OMX_TRUE; 4167 h264type.bconstIpred = OMX_TRUE; 4168 h264type.bDirect8x8Inference = OMX_TRUE; 4169 h264type.bDirectSpatialTemporal = 
OMX_TRUE; 4170 h264type.nCabacInitIdc = 1; 4171 } 4172 4173 if (h264type.nBFrames != 0) { 4174 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4175 } 4176 4177 h264type.bEnableUEP = OMX_FALSE; 4178 h264type.bEnableFMO = OMX_FALSE; 4179 h264type.bEnableASO = OMX_FALSE; 4180 h264type.bEnableRS = OMX_FALSE; 4181 h264type.bFrameMBsOnly = OMX_TRUE; 4182 h264type.bMBAFF = OMX_FALSE; 4183 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4184 4185 err = mOMX->setParameter( 4186 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4187 4188 if (err != OK) { 4189 return err; 4190 } 4191 4192 return configureBitrate(bitrate, bitrateMode); 4193} 4194 4195status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4196 int32_t bitrate, iFrameInterval; 4197 if (!msg->findInt32("bitrate", &bitrate) 4198 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4199 return INVALID_OPERATION; 4200 } 4201 4202 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4203 4204 float frameRate; 4205 if (!msg->findFloat("frame-rate", &frameRate)) { 4206 int32_t tmp; 4207 if (!msg->findInt32("frame-rate", &tmp)) { 4208 return INVALID_OPERATION; 4209 } 4210 frameRate = (float)tmp; 4211 } 4212 4213 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4214 InitOMXParams(&hevcType); 4215 hevcType.nPortIndex = kPortIndexOutput; 4216 4217 status_t err = OK; 4218 err = mOMX->getParameter( 4219 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4220 if (err != OK) { 4221 return err; 4222 } 4223 4224 int32_t profile; 4225 if (msg->findInt32("profile", &profile)) { 4226 int32_t level; 4227 if (!msg->findInt32("level", &level)) { 4228 return INVALID_OPERATION; 4229 } 4230 4231 err = verifySupportForProfileAndLevel(profile, level); 4232 if (err != OK) { 4233 return err; 4234 } 4235 4236 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4237 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4238 } 4239 // TODO: finer 
control? 4240 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4241 4242 err = mOMX->setParameter( 4243 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4244 if (err != OK) { 4245 return err; 4246 } 4247 4248 return configureBitrate(bitrate, bitrateMode); 4249} 4250 4251status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 4252 int32_t bitrate; 4253 int32_t iFrameInterval = 0; 4254 size_t tsLayers = 0; 4255 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4256 OMX_VIDEO_VPXTemporalLayerPatternNone; 4257 static const uint32_t kVp8LayerRateAlloction 4258 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4259 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4260 {100, 100, 100}, // 1 layer 4261 { 60, 100, 100}, // 2 layers {60%, 40%} 4262 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4263 }; 4264 if (!msg->findInt32("bitrate", &bitrate)) { 4265 return INVALID_OPERATION; 4266 } 4267 msg->findInt32("i-frame-interval", &iFrameInterval); 4268 4269 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4270 4271 float frameRate; 4272 if (!msg->findFloat("frame-rate", &frameRate)) { 4273 int32_t tmp; 4274 if (!msg->findInt32("frame-rate", &tmp)) { 4275 return INVALID_OPERATION; 4276 } 4277 frameRate = (float)tmp; 4278 } 4279 4280 AString tsSchema; 4281 if (msg->findString("ts-schema", &tsSchema)) { 4282 unsigned int numLayers = 0; 4283 unsigned int numBLayers = 0; 4284 int tags; 4285 char dummy; 4286 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1 4287 && numLayers > 0) { 4288 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4289 tsLayers = numLayers; 4290 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c", 4291 &numLayers, &dummy, &numBLayers, &dummy)) 4292 && (tags == 1 || (tags == 3 && dummy == '+')) 4293 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 4294 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4295 // VPX does not have a 
concept of B-frames, so just count all layers 4296 tsLayers = numLayers + numBLayers; 4297 } else { 4298 ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 4299 } 4300 tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS); 4301 } 4302 4303 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4304 InitOMXParams(&vp8type); 4305 vp8type.nPortIndex = kPortIndexOutput; 4306 status_t err = mOMX->getParameter( 4307 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4308 &vp8type, sizeof(vp8type)); 4309 4310 if (err == OK) { 4311 if (iFrameInterval > 0) { 4312 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4313 } 4314 vp8type.eTemporalPattern = pattern; 4315 vp8type.nTemporalLayerCount = tsLayers; 4316 if (tsLayers > 0) { 4317 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4318 vp8type.nTemporalLayerBitrateRatio[i] = 4319 kVp8LayerRateAlloction[tsLayers - 1][i]; 4320 } 4321 } 4322 if (bitrateMode == OMX_Video_ControlRateConstant) { 4323 vp8type.nMinQuantizer = 2; 4324 vp8type.nMaxQuantizer = 63; 4325 } 4326 4327 err = mOMX->setParameter( 4328 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4329 &vp8type, sizeof(vp8type)); 4330 if (err != OK) { 4331 ALOGW("Extended VP8 parameters set failed: %d", err); 4332 } 4333 } 4334 4335 return configureBitrate(bitrate, bitrateMode); 4336} 4337 4338status_t ACodec::verifySupportForProfileAndLevel( 4339 int32_t profile, int32_t level) { 4340 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4341 InitOMXParams(¶ms); 4342 params.nPortIndex = kPortIndexOutput; 4343 4344 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4345 params.nProfileIndex = index; 4346 status_t err = mOMX->getParameter( 4347 mNode, 4348 OMX_IndexParamVideoProfileLevelQuerySupported, 4349 ¶ms, 4350 sizeof(params)); 4351 4352 if (err != OK) { 4353 return err; 4354 } 4355 4356 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4357 int32_t supportedLevel 
= static_cast<int32_t>(params.eLevel); 4358 4359 if (profile == supportedProfile && level <= supportedLevel) { 4360 return OK; 4361 } 4362 4363 if (index == kMaxIndicesToCheck) { 4364 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4365 mComponentName.c_str(), index, 4366 params.eProfile, params.eLevel); 4367 } 4368 } 4369 return ERROR_UNSUPPORTED; 4370} 4371 4372status_t ACodec::configureBitrate( 4373 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 4374 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 4375 InitOMXParams(&bitrateType); 4376 bitrateType.nPortIndex = kPortIndexOutput; 4377 4378 status_t err = mOMX->getParameter( 4379 mNode, OMX_IndexParamVideoBitrate, 4380 &bitrateType, sizeof(bitrateType)); 4381 4382 if (err != OK) { 4383 return err; 4384 } 4385 4386 bitrateType.eControlRate = bitrateMode; 4387 bitrateType.nTargetBitrate = bitrate; 4388 4389 return mOMX->setParameter( 4390 mNode, OMX_IndexParamVideoBitrate, 4391 &bitrateType, sizeof(bitrateType)); 4392} 4393 4394status_t ACodec::setupErrorCorrectionParameters() { 4395 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 4396 InitOMXParams(&errorCorrectionType); 4397 errorCorrectionType.nPortIndex = kPortIndexOutput; 4398 4399 status_t err = mOMX->getParameter( 4400 mNode, OMX_IndexParamVideoErrorCorrection, 4401 &errorCorrectionType, sizeof(errorCorrectionType)); 4402 4403 if (err != OK) { 4404 return OK; // Optional feature. 
Ignore this failure 4405 } 4406 4407 errorCorrectionType.bEnableHEC = OMX_FALSE; 4408 errorCorrectionType.bEnableResync = OMX_TRUE; 4409 errorCorrectionType.nResynchMarkerSpacing = 256; 4410 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 4411 errorCorrectionType.bEnableRVLC = OMX_FALSE; 4412 4413 return mOMX->setParameter( 4414 mNode, OMX_IndexParamVideoErrorCorrection, 4415 &errorCorrectionType, sizeof(errorCorrectionType)); 4416} 4417 4418status_t ACodec::setVideoFormatOnPort( 4419 OMX_U32 portIndex, 4420 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 4421 float frameRate) { 4422 OMX_PARAM_PORTDEFINITIONTYPE def; 4423 InitOMXParams(&def); 4424 def.nPortIndex = portIndex; 4425 4426 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 4427 4428 status_t err = mOMX->getParameter( 4429 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4430 if (err != OK) { 4431 return err; 4432 } 4433 4434 if (portIndex == kPortIndexInput) { 4435 // XXX Need a (much) better heuristic to compute input buffer sizes. 
4436 const size_t X = 64 * 1024; 4437 if (def.nBufferSize < X) { 4438 def.nBufferSize = X; 4439 } 4440 } 4441 4442 if (def.eDomain != OMX_PortDomainVideo) { 4443 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); 4444 return FAILED_TRANSACTION; 4445 } 4446 4447 video_def->nFrameWidth = width; 4448 video_def->nFrameHeight = height; 4449 4450 if (portIndex == kPortIndexInput) { 4451 video_def->eCompressionFormat = compressionFormat; 4452 video_def->eColorFormat = OMX_COLOR_FormatUnused; 4453 if (frameRate >= 0) { 4454 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 4455 } 4456 } 4457 4458 err = mOMX->setParameter( 4459 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4460 4461 return err; 4462} 4463 4464status_t ACodec::initNativeWindow() { 4465 if (mNativeWindow != NULL) { 4466 return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE); 4467 } 4468 4469 mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 4470 return OK; 4471} 4472 4473size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 4474 size_t n = 0; 4475 4476 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4477 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 4478 4479 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4480 ++n; 4481 } 4482 } 4483 4484 return n; 4485} 4486 4487size_t ACodec::countBuffersOwnedByNativeWindow() const { 4488 size_t n = 0; 4489 4490 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 4491 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 4492 4493 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4494 ++n; 4495 } 4496 } 4497 4498 return n; 4499} 4500 4501void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 4502 if (mNativeWindow == NULL) { 4503 return; 4504 } 4505 4506 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 4507 && dequeueBufferFromNativeWindow() != NULL) { 
4508 // these buffers will be submitted as regular buffers; account for this 4509 if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) { 4510 --mMetadataBuffersToSubmit; 4511 } 4512 } 4513} 4514 4515bool ACodec::allYourBuffersAreBelongToUs( 4516 OMX_U32 portIndex) { 4517 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4518 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 4519 4520 if (info->mStatus != BufferInfo::OWNED_BY_US 4521 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4522 ALOGV("[%s] Buffer %u on port %u still has status %d", 4523 mComponentName.c_str(), 4524 info->mBufferID, portIndex, info->mStatus); 4525 return false; 4526 } 4527 } 4528 4529 return true; 4530} 4531 4532bool ACodec::allYourBuffersAreBelongToUs() { 4533 return allYourBuffersAreBelongToUs(kPortIndexInput) 4534 && allYourBuffersAreBelongToUs(kPortIndexOutput); 4535} 4536 4537void ACodec::deferMessage(const sp<AMessage> &msg) { 4538 mDeferredQueue.push_back(msg); 4539} 4540 4541void ACodec::processDeferredMessages() { 4542 List<sp<AMessage> > queue = mDeferredQueue; 4543 mDeferredQueue.clear(); 4544 4545 List<sp<AMessage> >::iterator it = queue.begin(); 4546 while (it != queue.end()) { 4547 onMessageReceived(*it++); 4548 } 4549} 4550 4551// static 4552bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params ¶ms) { 4553 MediaImage2 &image = params.sMediaImage; 4554 memset(&image, 0, sizeof(image)); 4555 4556 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4557 image.mNumPlanes = 0; 4558 4559 const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat; 4560 image.mWidth = params.nFrameWidth; 4561 image.mHeight = params.nFrameHeight; 4562 4563 // only supporting YUV420 4564 if (fmt != OMX_COLOR_FormatYUV420Planar && 4565 fmt != OMX_COLOR_FormatYUV420PackedPlanar && 4566 fmt != OMX_COLOR_FormatYUV420SemiPlanar && 4567 fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar && 4568 fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) { 4569 ALOGW("do not 
know color format 0x%x = %d", fmt, fmt); 4570 return false; 4571 } 4572 4573 // TEMPORARY FIX for some vendors that advertise sliceHeight as 0 4574 if (params.nStride != 0 && params.nSliceHeight == 0) { 4575 ALOGW("using sliceHeight=%u instead of what codec advertised (=0)", 4576 params.nFrameHeight); 4577 params.nSliceHeight = params.nFrameHeight; 4578 } 4579 4580 // we need stride and slice-height to be non-zero and sensible. These values were chosen to 4581 // prevent integer overflows further down the line, and do not indicate support for 4582 // 32kx32k video. 4583 if (params.nStride == 0 || params.nSliceHeight == 0 4584 || params.nStride > 32768 || params.nSliceHeight > 32768) { 4585 ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u", 4586 fmt, fmt, params.nStride, params.nSliceHeight); 4587 return false; 4588 } 4589 4590 // set-up YUV format 4591 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV; 4592 image.mNumPlanes = 3; 4593 image.mBitDepth = 8; 4594 image.mBitDepthAllocated = 8; 4595 image.mPlane[image.Y].mOffset = 0; 4596 image.mPlane[image.Y].mColInc = 1; 4597 image.mPlane[image.Y].mRowInc = params.nStride; 4598 image.mPlane[image.Y].mHorizSubsampling = 1; 4599 image.mPlane[image.Y].mVertSubsampling = 1; 4600 4601 switch ((int)fmt) { 4602 case HAL_PIXEL_FORMAT_YV12: 4603 if (params.bUsingNativeBuffers) { 4604 size_t ystride = align(params.nStride, 16); 4605 size_t cstride = align(params.nStride / 2, 16); 4606 image.mPlane[image.Y].mRowInc = ystride; 4607 4608 image.mPlane[image.V].mOffset = ystride * params.nSliceHeight; 4609 image.mPlane[image.V].mColInc = 1; 4610 image.mPlane[image.V].mRowInc = cstride; 4611 image.mPlane[image.V].mHorizSubsampling = 2; 4612 image.mPlane[image.V].mVertSubsampling = 2; 4613 4614 image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset 4615 + (cstride * params.nSliceHeight / 2); 4616 image.mPlane[image.U].mColInc = 1; 4617 image.mPlane[image.U].mRowInc = cstride; 4618 
image.mPlane[image.U].mHorizSubsampling = 2; 4619 image.mPlane[image.U].mVertSubsampling = 2; 4620 break; 4621 } else { 4622 // fall through as YV12 is used for YUV420Planar by some codecs 4623 } 4624 4625 case OMX_COLOR_FormatYUV420Planar: 4626 case OMX_COLOR_FormatYUV420PackedPlanar: 4627 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4628 image.mPlane[image.U].mColInc = 1; 4629 image.mPlane[image.U].mRowInc = params.nStride / 2; 4630 image.mPlane[image.U].mHorizSubsampling = 2; 4631 image.mPlane[image.U].mVertSubsampling = 2; 4632 4633 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset 4634 + (params.nStride * params.nSliceHeight / 4); 4635 image.mPlane[image.V].mColInc = 1; 4636 image.mPlane[image.V].mRowInc = params.nStride / 2; 4637 image.mPlane[image.V].mHorizSubsampling = 2; 4638 image.mPlane[image.V].mVertSubsampling = 2; 4639 break; 4640 4641 case OMX_COLOR_FormatYUV420SemiPlanar: 4642 // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder 4643 case OMX_COLOR_FormatYUV420PackedSemiPlanar: 4644 // NV12 4645 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4646 image.mPlane[image.U].mColInc = 2; 4647 image.mPlane[image.U].mRowInc = params.nStride; 4648 image.mPlane[image.U].mHorizSubsampling = 2; 4649 image.mPlane[image.U].mVertSubsampling = 2; 4650 4651 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1; 4652 image.mPlane[image.V].mColInc = 2; 4653 image.mPlane[image.V].mRowInc = params.nStride; 4654 image.mPlane[image.V].mHorizSubsampling = 2; 4655 image.mPlane[image.V].mVertSubsampling = 2; 4656 break; 4657 4658 default: 4659 TRESPASS(); 4660 } 4661 return true; 4662} 4663 4664// static 4665bool ACodec::describeColorFormat( 4666 const sp<IOMX> &omx, IOMX::node_id node, 4667 DescribeColorFormat2Params &describeParams) 4668{ 4669 OMX_INDEXTYPE describeColorFormatIndex; 4670 if (omx->getExtensionIndex( 4671 node, "OMX.google.android.index.describeColorFormat", 4672 
&describeColorFormatIndex) == OK) { 4673 DescribeColorFormatParams describeParamsV1(describeParams); 4674 if (omx->getParameter( 4675 node, describeColorFormatIndex, 4676 &describeParamsV1, sizeof(describeParamsV1)) == OK) { 4677 describeParams.initFromV1(describeParamsV1); 4678 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4679 } 4680 } else if (omx->getExtensionIndex( 4681 node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK 4682 && omx->getParameter( 4683 node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) { 4684 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4685 } 4686 4687 return describeDefaultColorFormat(describeParams); 4688} 4689 4690// static 4691bool ACodec::isFlexibleColorFormat( 4692 const sp<IOMX> &omx, IOMX::node_id node, 4693 uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) { 4694 DescribeColorFormat2Params describeParams; 4695 InitOMXParams(&describeParams); 4696 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; 4697 // reasonable dummy values 4698 describeParams.nFrameWidth = 128; 4699 describeParams.nFrameHeight = 128; 4700 describeParams.nStride = 128; 4701 describeParams.nSliceHeight = 128; 4702 describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers; 4703 4704 CHECK(flexibleEquivalent != NULL); 4705 4706 if (!describeColorFormat(omx, node, describeParams)) { 4707 return false; 4708 } 4709 4710 const MediaImage2 &img = describeParams.sMediaImage; 4711 if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) { 4712 if (img.mNumPlanes != 3 4713 || img.mPlane[img.Y].mHorizSubsampling != 1 4714 || img.mPlane[img.Y].mVertSubsampling != 1) { 4715 return false; 4716 } 4717 4718 // YUV 420 4719 if (img.mPlane[img.U].mHorizSubsampling == 2 4720 && img.mPlane[img.U].mVertSubsampling == 2 4721 && img.mPlane[img.V].mHorizSubsampling == 2 4722 && 
img.mPlane[img.V].mVertSubsampling == 2) { 4723 // possible flexible YUV420 format 4724 if (img.mBitDepth <= 8) { 4725 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible; 4726 return true; 4727 } 4728 } 4729 } 4730 return false; 4731} 4732 4733status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4734 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4735 OMX_PARAM_PORTDEFINITIONTYPE def; 4736 InitOMXParams(&def); 4737 def.nPortIndex = portIndex; 4738 4739 status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4740 if (err != OK) { 4741 return err; 4742 } 4743 4744 if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) { 4745 ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex); 4746 return BAD_VALUE; 4747 } 4748 4749 switch (def.eDomain) { 4750 case OMX_PortDomainVideo: 4751 { 4752 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 4753 switch ((int)videoDef->eCompressionFormat) { 4754 case OMX_VIDEO_CodingUnused: 4755 { 4756 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 4757 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 4758 4759 notify->setInt32("stride", videoDef->nStride); 4760 notify->setInt32("slice-height", videoDef->nSliceHeight); 4761 notify->setInt32("color-format", videoDef->eColorFormat); 4762 4763 if (mNativeWindow == NULL) { 4764 DescribeColorFormat2Params describeParams; 4765 InitOMXParams(&describeParams); 4766 describeParams.eColorFormat = videoDef->eColorFormat; 4767 describeParams.nFrameWidth = videoDef->nFrameWidth; 4768 describeParams.nFrameHeight = videoDef->nFrameHeight; 4769 describeParams.nStride = videoDef->nStride; 4770 describeParams.nSliceHeight = videoDef->nSliceHeight; 4771 describeParams.bUsingNativeBuffers = OMX_FALSE; 4772 4773 if (describeColorFormat(mOMX, mNode, describeParams)) { 4774 notify->setBuffer( 4775 "image-data", 4776 ABuffer::CreateAsCopy( 4777 
&describeParams.sMediaImage, 4778 sizeof(describeParams.sMediaImage))); 4779 4780 MediaImage2 &img = describeParams.sMediaImage; 4781 MediaImage2::PlaneInfo *plane = img.mPlane; 4782 ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }", 4783 mComponentName.c_str(), img.mWidth, img.mHeight, 4784 plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc, 4785 plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc, 4786 plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc); 4787 } 4788 } 4789 4790 int32_t width = (int32_t)videoDef->nFrameWidth; 4791 int32_t height = (int32_t)videoDef->nFrameHeight; 4792 4793 if (portIndex == kPortIndexOutput) { 4794 OMX_CONFIG_RECTTYPE rect; 4795 InitOMXParams(&rect); 4796 rect.nPortIndex = portIndex; 4797 4798 if (mOMX->getConfig( 4799 mNode, 4800 (portIndex == kPortIndexOutput ? 4801 OMX_IndexConfigCommonOutputCrop : 4802 OMX_IndexConfigCommonInputCrop), 4803 &rect, sizeof(rect)) != OK) { 4804 rect.nLeft = 0; 4805 rect.nTop = 0; 4806 rect.nWidth = videoDef->nFrameWidth; 4807 rect.nHeight = videoDef->nFrameHeight; 4808 } 4809 4810 if (rect.nLeft < 0 || 4811 rect.nTop < 0 || 4812 rect.nLeft + rect.nWidth > videoDef->nFrameWidth || 4813 rect.nTop + rect.nHeight > videoDef->nFrameHeight) { 4814 ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)", 4815 rect.nLeft, rect.nTop, 4816 rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight, 4817 videoDef->nFrameWidth, videoDef->nFrameHeight); 4818 return BAD_VALUE; 4819 } 4820 4821 notify->setRect( 4822 "crop", 4823 rect.nLeft, 4824 rect.nTop, 4825 rect.nLeft + rect.nWidth - 1, 4826 rect.nTop + rect.nHeight - 1); 4827 4828 width = rect.nWidth; 4829 height = rect.nHeight; 4830 4831 android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN; 4832 (void)getColorAspectsAndDataSpaceForVideoDecoder( 4833 width, height, mConfigFormat, notify, 4834 mUsingNativeWindow ? 
&dataSpace : NULL); 4835 if (mUsingNativeWindow) { 4836 notify->setInt32("android._dataspace", dataSpace); 4837 } 4838 (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify); 4839 } else { 4840 (void)getInputColorAspectsForVideoEncoder(notify); 4841 if (mConfigFormat->contains("hdr-static-info")) { 4842 (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify); 4843 } 4844 } 4845 4846 break; 4847 } 4848 4849 case OMX_VIDEO_CodingVP8: 4850 case OMX_VIDEO_CodingVP9: 4851 { 4852 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4853 InitOMXParams(&vp8type); 4854 vp8type.nPortIndex = kPortIndexOutput; 4855 status_t err = mOMX->getParameter( 4856 mNode, 4857 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4858 &vp8type, 4859 sizeof(vp8type)); 4860 4861 if (err == OK) { 4862 AString tsSchema = "none"; 4863 if (vp8type.eTemporalPattern 4864 == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { 4865 switch (vp8type.nTemporalLayerCount) { 4866 case 1: 4867 { 4868 tsSchema = "webrtc.vp8.1-layer"; 4869 break; 4870 } 4871 case 2: 4872 { 4873 tsSchema = "webrtc.vp8.2-layer"; 4874 break; 4875 } 4876 case 3: 4877 { 4878 tsSchema = "webrtc.vp8.3-layer"; 4879 break; 4880 } 4881 default: 4882 { 4883 break; 4884 } 4885 } 4886 } 4887 notify->setString("ts-schema", tsSchema); 4888 } 4889 // Fall through to set up mime. 
4890 } 4891 4892 default: 4893 { 4894 if (mIsEncoder ^ (portIndex == kPortIndexOutput)) { 4895 // should be CodingUnused 4896 ALOGE("Raw port video compression format is %s(%d)", 4897 asString(videoDef->eCompressionFormat), 4898 videoDef->eCompressionFormat); 4899 return BAD_VALUE; 4900 } 4901 AString mime; 4902 if (GetMimeTypeForVideoCoding( 4903 videoDef->eCompressionFormat, &mime) != OK) { 4904 notify->setString("mime", "application/octet-stream"); 4905 } else { 4906 notify->setString("mime", mime.c_str()); 4907 } 4908 uint32_t intraRefreshPeriod = 0; 4909 if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK 4910 && intraRefreshPeriod > 0) { 4911 notify->setInt32("intra-refresh-period", intraRefreshPeriod); 4912 } 4913 break; 4914 } 4915 } 4916 notify->setInt32("width", videoDef->nFrameWidth); 4917 notify->setInt32("height", videoDef->nFrameHeight); 4918 ALOGV("[%s] %s format is %s", mComponentName.c_str(), 4919 portIndex == kPortIndexInput ? "input" : "output", 4920 notify->debugString().c_str()); 4921 4922 break; 4923 } 4924 4925 case OMX_PortDomainAudio: 4926 { 4927 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 4928 4929 switch ((int)audioDef->eEncoding) { 4930 case OMX_AUDIO_CodingPCM: 4931 { 4932 OMX_AUDIO_PARAM_PCMMODETYPE params; 4933 InitOMXParams(¶ms); 4934 params.nPortIndex = portIndex; 4935 4936 err = mOMX->getParameter( 4937 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4938 if (err != OK) { 4939 return err; 4940 } 4941 4942 if (params.nChannels <= 0 4943 || (params.nChannels != 1 && !params.bInterleaved) 4944 || params.ePCMMode != OMX_AUDIO_PCMModeLinear) { 4945 ALOGE("unsupported PCM port: %u channels%s, %u-bit", 4946 params.nChannels, 4947 params.bInterleaved ? 
" interleaved" : "", 4948 params.nBitPerSample); 4949 return FAILED_TRANSACTION; 4950 } 4951 4952 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 4953 notify->setInt32("channel-count", params.nChannels); 4954 notify->setInt32("sample-rate", params.nSamplingRate); 4955 4956 AudioEncoding encoding = kAudioEncodingPcm16bit; 4957 if (params.eNumData == OMX_NumericalDataUnsigned 4958 && params.nBitPerSample == 8u) { 4959 encoding = kAudioEncodingPcm8bit; 4960 } else if (params.eNumData == OMX_NumericalDataFloat 4961 && params.nBitPerSample == 32u) { 4962 encoding = kAudioEncodingPcmFloat; 4963 } else if (params.nBitPerSample != 16u 4964 || params.eNumData != OMX_NumericalDataSigned) { 4965 ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ", 4966 asString(params.eNumData), params.eNumData, 4967 asString(params.ePCMMode), params.ePCMMode); 4968 return FAILED_TRANSACTION; 4969 } 4970 notify->setInt32("pcm-encoding", encoding); 4971 4972 if (mChannelMaskPresent) { 4973 notify->setInt32("channel-mask", mChannelMask); 4974 } 4975 break; 4976 } 4977 4978 case OMX_AUDIO_CodingAAC: 4979 { 4980 OMX_AUDIO_PARAM_AACPROFILETYPE params; 4981 InitOMXParams(¶ms); 4982 params.nPortIndex = portIndex; 4983 4984 err = mOMX->getParameter( 4985 mNode, OMX_IndexParamAudioAac, ¶ms, sizeof(params)); 4986 if (err != OK) { 4987 return err; 4988 } 4989 4990 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 4991 notify->setInt32("channel-count", params.nChannels); 4992 notify->setInt32("sample-rate", params.nSampleRate); 4993 break; 4994 } 4995 4996 case OMX_AUDIO_CodingAMR: 4997 { 4998 OMX_AUDIO_PARAM_AMRTYPE params; 4999 InitOMXParams(¶ms); 5000 params.nPortIndex = portIndex; 5001 5002 err = mOMX->getParameter( 5003 mNode, OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); 5004 if (err != OK) { 5005 return err; 5006 } 5007 5008 notify->setInt32("channel-count", 1); 5009 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 5010 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 5011 
notify->setInt32("sample-rate", 16000); 5012 } else { 5013 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 5014 notify->setInt32("sample-rate", 8000); 5015 } 5016 break; 5017 } 5018 5019 case OMX_AUDIO_CodingFLAC: 5020 { 5021 OMX_AUDIO_PARAM_FLACTYPE params; 5022 InitOMXParams(¶ms); 5023 params.nPortIndex = portIndex; 5024 5025 err = mOMX->getParameter( 5026 mNode, OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); 5027 if (err != OK) { 5028 return err; 5029 } 5030 5031 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 5032 notify->setInt32("channel-count", params.nChannels); 5033 notify->setInt32("sample-rate", params.nSampleRate); 5034 break; 5035 } 5036 5037 case OMX_AUDIO_CodingMP3: 5038 { 5039 OMX_AUDIO_PARAM_MP3TYPE params; 5040 InitOMXParams(¶ms); 5041 params.nPortIndex = portIndex; 5042 5043 err = mOMX->getParameter( 5044 mNode, OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); 5045 if (err != OK) { 5046 return err; 5047 } 5048 5049 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 5050 notify->setInt32("channel-count", params.nChannels); 5051 notify->setInt32("sample-rate", params.nSampleRate); 5052 break; 5053 } 5054 5055 case OMX_AUDIO_CodingVORBIS: 5056 { 5057 OMX_AUDIO_PARAM_VORBISTYPE params; 5058 InitOMXParams(¶ms); 5059 params.nPortIndex = portIndex; 5060 5061 err = mOMX->getParameter( 5062 mNode, OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); 5063 if (err != OK) { 5064 return err; 5065 } 5066 5067 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 5068 notify->setInt32("channel-count", params.nChannels); 5069 notify->setInt32("sample-rate", params.nSampleRate); 5070 break; 5071 } 5072 5073 case OMX_AUDIO_CodingAndroidAC3: 5074 { 5075 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 5076 InitOMXParams(¶ms); 5077 params.nPortIndex = portIndex; 5078 5079 err = mOMX->getParameter( 5080 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 5081 ¶ms, sizeof(params)); 5082 if (err != OK) { 5083 return err; 5084 } 5085 5086 notify->setString("mime", 
MEDIA_MIMETYPE_AUDIO_AC3); 5087 notify->setInt32("channel-count", params.nChannels); 5088 notify->setInt32("sample-rate", params.nSampleRate); 5089 break; 5090 } 5091 5092 case OMX_AUDIO_CodingAndroidEAC3: 5093 { 5094 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 5095 InitOMXParams(¶ms); 5096 params.nPortIndex = portIndex; 5097 5098 err = mOMX->getParameter( 5099 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 5100 ¶ms, sizeof(params)); 5101 if (err != OK) { 5102 return err; 5103 } 5104 5105 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 5106 notify->setInt32("channel-count", params.nChannels); 5107 notify->setInt32("sample-rate", params.nSampleRate); 5108 break; 5109 } 5110 5111 case OMX_AUDIO_CodingAndroidOPUS: 5112 { 5113 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 5114 InitOMXParams(¶ms); 5115 params.nPortIndex = portIndex; 5116 5117 err = mOMX->getParameter( 5118 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 5119 ¶ms, sizeof(params)); 5120 if (err != OK) { 5121 return err; 5122 } 5123 5124 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 5125 notify->setInt32("channel-count", params.nChannels); 5126 notify->setInt32("sample-rate", params.nSampleRate); 5127 break; 5128 } 5129 5130 case OMX_AUDIO_CodingG711: 5131 { 5132 OMX_AUDIO_PARAM_PCMMODETYPE params; 5133 InitOMXParams(¶ms); 5134 params.nPortIndex = portIndex; 5135 5136 err = mOMX->getParameter( 5137 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5138 if (err != OK) { 5139 return err; 5140 } 5141 5142 const char *mime = NULL; 5143 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 5144 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 5145 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 5146 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 5147 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 5148 mime = MEDIA_MIMETYPE_AUDIO_RAW; 5149 } 5150 notify->setString("mime", mime); 5151 notify->setInt32("channel-count", params.nChannels); 5152 notify->setInt32("sample-rate", 
params.nSamplingRate); 5153 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5154 break; 5155 } 5156 5157 case OMX_AUDIO_CodingGSMFR: 5158 { 5159 OMX_AUDIO_PARAM_PCMMODETYPE params; 5160 InitOMXParams(¶ms); 5161 params.nPortIndex = portIndex; 5162 5163 err = mOMX->getParameter( 5164 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5165 if (err != OK) { 5166 return err; 5167 } 5168 5169 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5170 notify->setInt32("channel-count", params.nChannels); 5171 notify->setInt32("sample-rate", params.nSamplingRate); 5172 break; 5173 } 5174 5175 default: 5176 ALOGE("Unsupported audio coding: %s(%d)\n", 5177 asString(audioDef->eEncoding), audioDef->eEncoding); 5178 return BAD_TYPE; 5179 } 5180 break; 5181 } 5182 5183 default: 5184 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5185 return BAD_TYPE; 5186 } 5187 5188 return OK; 5189} 5190 5191void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5192 // aspects are normally communicated in ColorAspects 5193 int32_t range, standard, transfer; 5194 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5195 5196 // if some aspects are unspecified, use dataspace fields 5197 if (range != 0) { 5198 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5199 } 5200 if (standard != 0) { 5201 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5202 } 5203 if (transfer != 0) { 5204 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5205 } 5206 5207 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5208 if (range != 0) { 5209 mOutputFormat->setInt32("color-range", range); 5210 } 5211 if (standard != 0) { 5212 mOutputFormat->setInt32("color-standard", standard); 5213 } 5214 if (transfer != 0) { 5215 mOutputFormat->setInt32("color-transfer", transfer); 5216 } 5217 5218 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5219 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5220 dataSpace, 5221 aspects.mRange, asString(aspects.mRange), 5222 aspects.mPrimaries, asString(aspects.mPrimaries), 5223 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5224 aspects.mTransfer, asString(aspects.mTransfer), 5225 range, asString((ColorRange)range), 5226 standard, asString((ColorStandard)standard), 5227 transfer, asString((ColorTransfer)transfer)); 5228} 5229 5230void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5231 // store new output format, at the same time mark that this is no longer the first frame 5232 mOutputFormat = mBaseOutputFormat->dup(); 5233 5234 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5235 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5236 return; 5237 } 5238 5239 if (expectedFormat != NULL) { 5240 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5241 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5242 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5243 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5244 mComponentName.c_str(), 5245 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5246 } 5247 } 5248 5249 if (!mIsVideo && !mIsEncoder) { 5250 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5251 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5252 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5253 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5254 5255 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5256 if (mConverter[kPortIndexOutput] != NULL) { 5257 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5258 } 5259 } 5260 5261 if (mTunneled) { 5262 sendFormatChange(); 5263 } 5264} 5265 5266void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> ¬ify) { 5267 AString mime; 5268 CHECK(mOutputFormat->findString("mime", &mime)); 5269 5270 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) { 5271 // notify renderer of the crop change and dataspace change 5272 // NOTE: native window uses extended right-bottom coordinate 5273 int32_t left, top, right, bottom; 5274 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) { 5275 notify->setRect("crop", left, top, right + 1, bottom + 1); 5276 } 5277 5278 int32_t dataSpace; 5279 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) { 5280 notify->setInt32("dataspace", dataSpace); 5281 } 5282 } 5283} 5284 5285void ACodec::sendFormatChange() { 5286 AString mime; 5287 CHECK(mOutputFormat->findString("mime", &mime)); 5288 5289 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 5290 int32_t channelCount, sampleRate; 5291 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 5292 CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate)); 5293 if (mSampleRate != 0 && sampleRate != 0) { 5294 mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate; 5295 mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate; 5296 mSampleRate = sampleRate; 5297 } 5298 if (mSkipCutBuffer != NULL) { 5299 size_t prevbufsize = mSkipCutBuffer->size(); 5300 if (prevbufsize != 0) { 5301 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 5302 } 5303 } 5304 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 5305 } 5306 5307 sp<AMessage> notify = mNotify->dup(); 5308 notify->setInt32("what", kWhatOutputFormatChanged); 5309 notify->setMessage("format", mOutputFormat); 5310 notify->post(); 5311 5312 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 5313 mLastOutputFormat = mOutputFormat; 5314} 5315 5316void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 5317 
sp<AMessage> notify = mNotify->dup(); 5318 notify->setInt32("what", CodecBase::kWhatError); 5319 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 5320 5321 if (internalError == UNKNOWN_ERROR) { // find better error code 5322 const status_t omxStatus = statusFromOMXError(error); 5323 if (omxStatus != 0) { 5324 internalError = omxStatus; 5325 } else { 5326 ALOGW("Invalid OMX error %#x", error); 5327 } 5328 } 5329 5330 mFatalError = true; 5331 5332 notify->setInt32("err", internalError); 5333 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 5334 notify->post(); 5335} 5336 5337//////////////////////////////////////////////////////////////////////////////// 5338 5339ACodec::PortDescription::PortDescription() { 5340} 5341 5342status_t ACodec::requestIDRFrame() { 5343 if (!mIsEncoder) { 5344 return ERROR_UNSUPPORTED; 5345 } 5346 5347 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5348 InitOMXParams(¶ms); 5349 5350 params.nPortIndex = kPortIndexOutput; 5351 params.IntraRefreshVOP = OMX_TRUE; 5352 5353 return mOMX->setConfig( 5354 mNode, 5355 OMX_IndexConfigVideoIntraVOPRefresh, 5356 ¶ms, 5357 sizeof(params)); 5358} 5359 5360void ACodec::PortDescription::addBuffer( 5361 IOMX::buffer_id id, const sp<ABuffer> &buffer, 5362 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) { 5363 mBufferIDs.push_back(id); 5364 mBuffers.push_back(buffer); 5365 mHandles.push_back(handle); 5366 mMemRefs.push_back(memRef); 5367} 5368 5369size_t ACodec::PortDescription::countBuffers() { 5370 return mBufferIDs.size(); 5371} 5372 5373IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5374 return mBufferIDs.itemAt(index); 5375} 5376 5377sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5378 return mBuffers.itemAt(index); 5379} 5380 5381sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const { 5382 return mHandles.itemAt(index); 5383} 5384 5385sp<RefBase> 
ACodec::PortDescription::memRefAt(size_t index) const { 5386 return mMemRefs.itemAt(index); 5387} 5388 5389//////////////////////////////////////////////////////////////////////////////// 5390 5391ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5392 : AState(parentState), 5393 mCodec(codec) { 5394} 5395 5396ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5397 OMX_U32 /* portIndex */) { 5398 return KEEP_BUFFERS; 5399} 5400 5401bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5402 switch (msg->what()) { 5403 case kWhatInputBufferFilled: 5404 { 5405 onInputBufferFilled(msg); 5406 break; 5407 } 5408 5409 case kWhatOutputBufferDrained: 5410 { 5411 onOutputBufferDrained(msg); 5412 break; 5413 } 5414 5415 case ACodec::kWhatOMXMessageList: 5416 { 5417 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5418 } 5419 5420 case ACodec::kWhatOMXMessageItem: 5421 { 5422 // no need to check as we already did it for kWhatOMXMessageList 5423 return onOMXMessage(msg); 5424 } 5425 5426 case ACodec::kWhatOMXMessage: 5427 { 5428 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5429 } 5430 5431 case ACodec::kWhatSetSurface: 5432 { 5433 sp<AReplyToken> replyID; 5434 CHECK(msg->senderAwaitsResponse(&replyID)); 5435 5436 sp<RefBase> obj; 5437 CHECK(msg->findObject("surface", &obj)); 5438 5439 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5440 5441 sp<AMessage> response = new AMessage; 5442 response->setInt32("err", err); 5443 response->postReply(replyID); 5444 break; 5445 } 5446 5447 case ACodec::kWhatCreateInputSurface: 5448 case ACodec::kWhatSetInputSurface: 5449 case ACodec::kWhatSignalEndOfInputStream: 5450 { 5451 // This may result in an app illegal state exception. 
5452 ALOGE("Message 0x%x was not handled", msg->what()); 5453 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5454 return true; 5455 } 5456 5457 case ACodec::kWhatOMXDied: 5458 { 5459 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5460 ALOGE("OMX/mediaserver died, signalling error!"); 5461 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5462 break; 5463 } 5464 5465 case ACodec::kWhatReleaseCodecInstance: 5466 { 5467 ALOGI("[%s] forcing the release of codec", 5468 mCodec->mComponentName.c_str()); 5469 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5470 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5471 mCodec->mComponentName.c_str(), err); 5472 sp<AMessage> notify = mCodec->mNotify->dup(); 5473 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5474 notify->post(); 5475 break; 5476 } 5477 5478 default: 5479 return false; 5480 } 5481 5482 return true; 5483} 5484 5485bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5486 // there is a possibility that this is an outstanding message for a 5487 // codec that we have already destroyed 5488 if (mCodec->mNode == 0) { 5489 ALOGI("ignoring message as already freed component: %s", 5490 msg->debugString().c_str()); 5491 return false; 5492 } 5493 5494 IOMX::node_id nodeID; 5495 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5496 if (nodeID != mCodec->mNode) { 5497 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5498 return false; 5499 } 5500 return true; 5501} 5502 5503bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5504 sp<RefBase> obj; 5505 CHECK(msg->findObject("messages", &obj)); 5506 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5507 5508 bool receivedRenderedEvents = false; 5509 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5510 it != msgList->getList().cend(); ++it) { 5511 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 
5512 mCodec->handleMessage(*it); 5513 int32_t type; 5514 CHECK((*it)->findInt32("type", &type)); 5515 if (type == omx_message::FRAME_RENDERED) { 5516 receivedRenderedEvents = true; 5517 } 5518 } 5519 5520 if (receivedRenderedEvents) { 5521 // NOTE: all buffers are rendered in this case 5522 mCodec->notifyOfRenderedFrames(); 5523 } 5524 return true; 5525} 5526 5527bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5528 int32_t type; 5529 CHECK(msg->findInt32("type", &type)); 5530 5531 switch (type) { 5532 case omx_message::EVENT: 5533 { 5534 int32_t event, data1, data2; 5535 CHECK(msg->findInt32("event", &event)); 5536 CHECK(msg->findInt32("data1", &data1)); 5537 CHECK(msg->findInt32("data2", &data2)); 5538 5539 if (event == OMX_EventCmdComplete 5540 && data1 == OMX_CommandFlush 5541 && data2 == (int32_t)OMX_ALL) { 5542 // Use of this notification is not consistent across 5543 // implementations. We'll drop this notification and rely 5544 // on flush-complete notifications on the individual port 5545 // indices instead. 
5546 5547 return true; 5548 } 5549 5550 return onOMXEvent( 5551 static_cast<OMX_EVENTTYPE>(event), 5552 static_cast<OMX_U32>(data1), 5553 static_cast<OMX_U32>(data2)); 5554 } 5555 5556 case omx_message::EMPTY_BUFFER_DONE: 5557 { 5558 IOMX::buffer_id bufferID; 5559 int32_t fenceFd; 5560 5561 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5562 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5563 5564 return onOMXEmptyBufferDone(bufferID, fenceFd); 5565 } 5566 5567 case omx_message::FILL_BUFFER_DONE: 5568 { 5569 IOMX::buffer_id bufferID; 5570 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5571 5572 int32_t rangeOffset, rangeLength, flags, fenceFd; 5573 int64_t timeUs; 5574 5575 CHECK(msg->findInt32("range_offset", &rangeOffset)); 5576 CHECK(msg->findInt32("range_length", &rangeLength)); 5577 CHECK(msg->findInt32("flags", &flags)); 5578 CHECK(msg->findInt64("timestamp", &timeUs)); 5579 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5580 5581 return onOMXFillBufferDone( 5582 bufferID, 5583 (size_t)rangeOffset, (size_t)rangeLength, 5584 (OMX_U32)flags, 5585 timeUs, 5586 fenceFd); 5587 } 5588 5589 case omx_message::FRAME_RENDERED: 5590 { 5591 int64_t mediaTimeUs, systemNano; 5592 5593 CHECK(msg->findInt64("media_time_us", &mediaTimeUs)); 5594 CHECK(msg->findInt64("system_nano", &systemNano)); 5595 5596 return onOMXFrameRendered( 5597 mediaTimeUs, systemNano); 5598 } 5599 5600 default: 5601 ALOGE("Unexpected message type: %d", type); 5602 return false; 5603 } 5604} 5605 5606bool ACodec::BaseState::onOMXFrameRendered( 5607 int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) { 5608 // ignore outside of Executing and PortSettingsChanged states 5609 return true; 5610} 5611 5612bool ACodec::BaseState::onOMXEvent( 5613 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5614 if (event == OMX_EventDataSpaceChanged) { 5615 ColorAspects aspects; 5616 aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF); 5617 aspects.mPrimaries = 
(ColorAspects::Primaries)((data2 >> 16) & 0xFF); 5618 aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF); 5619 aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF); 5620 5621 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects); 5622 return true; 5623 } 5624 5625 if (event != OMX_EventError) { 5626 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)", 5627 mCodec->mComponentName.c_str(), event, data1, data2); 5628 5629 return false; 5630 } 5631 5632 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1); 5633 5634 // verify OMX component sends back an error we expect. 5635 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 5636 if (!isOMXError(omxError)) { 5637 ALOGW("Invalid OMX error %#x", omxError); 5638 omxError = OMX_ErrorUndefined; 5639 } 5640 mCodec->signalError(omxError); 5641 5642 return true; 5643} 5644 5645bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { 5646 ALOGV("[%s] onOMXEmptyBufferDone %u", 5647 mCodec->mComponentName.c_str(), bufferID); 5648 5649 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5650 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5651 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5652 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5653 mCodec->dumpBuffers(kPortIndexInput); 5654 if (fenceFd >= 0) { 5655 ::close(fenceFd); 5656 } 5657 return false; 5658 } 5659 info->mStatus = BufferInfo::OWNED_BY_US; 5660 5661 // input buffers cannot take fences, so wait for any fence now 5662 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); 5663 fenceFd = -1; 5664 5665 // still save fence for completeness 5666 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); 5667 5668 // We're in "store-metadata-in-buffers" mode, the underlying 5669 // OMX component had access to data that's implicitly refcounted 5670 // by this "MediaBuffer" object. 
Now that the OMX component has 5671 // told us that it's done with the input buffer, we can decrement 5672 // the mediaBuffer's reference count. 5673 info->mData->setMediaBufferBase(NULL); 5674 5675 PortMode mode = getPortMode(kPortIndexInput); 5676 5677 switch (mode) { 5678 case KEEP_BUFFERS: 5679 break; 5680 5681 case RESUBMIT_BUFFERS: 5682 postFillThisBuffer(info); 5683 break; 5684 5685 case FREE_BUFFERS: 5686 default: 5687 ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers"); 5688 return false; 5689 } 5690 5691 return true; 5692} 5693 5694void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 5695 if (mCodec->mPortEOS[kPortIndexInput]) { 5696 return; 5697 } 5698 5699 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 5700 5701 sp<AMessage> notify = mCodec->mNotify->dup(); 5702 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 5703 notify->setInt32("buffer-id", info->mBufferID); 5704 5705 info->mData->meta()->clear(); 5706 notify->setBuffer("buffer", info->mData); 5707 5708 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); 5709 reply->setInt32("buffer-id", info->mBufferID); 5710 5711 notify->setMessage("reply", reply); 5712 5713 notify->post(); 5714 5715 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 5716} 5717 5718void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 5719 IOMX::buffer_id bufferID; 5720 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5721 sp<ABuffer> buffer; 5722 int32_t err = OK; 5723 bool eos = false; 5724 PortMode mode = getPortMode(kPortIndexInput); 5725 5726 if (!msg->findBuffer("buffer", &buffer)) { 5727 /* these are unfilled buffers returned by client */ 5728 CHECK(msg->findInt32("err", &err)); 5729 5730 if (err == OK) { 5731 /* buffers with no errors are returned on MediaCodec.flush */ 5732 mode = KEEP_BUFFERS; 5733 } else { 5734 ALOGV("[%s] saw error %d instead of an input buffer", 5735 mCodec->mComponentName.c_str(), err); 5736 eos = true; 5737 } 5738 5739 
        buffer.clear();
    }

    // an explicit "eos" flag on the buffer also ends the stream
    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // flush/shutdown path: just record EOS, do not submit to component
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                MetadataBufferType metaType = mCodec->mInputMetadataType;
                int32_t isCSD = 0;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    if (mCodec->mIsLegacyVP9Decoder) {
                        ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                        postFillThisBuffer(info);
                        break;
                    }
                    // codec-specific data is sent as a plain (non-metadata) buffer
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                    metaType = kMetadataBufferTypeInvalid;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                if (buffer != info->mCodecData) {
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                            mCodec->mComponentName.c_str(),
                            bufferID,
                            buffer.get(), info->mCodecData.get());

                    // CSD bypasses any audio converter - use a plain copy
                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL || isCSD) {
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                            mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                            mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                            mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");

                // For metadata input modes, refresh the handle/graphic buffer
                // stored in the metadata before submitting.
                status_t err2 = OK;
                switch (metaType) {
                case kMetadataBufferTypeInvalid:
                    break;
#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                case kMetadataBufferTypeNativeHandleSource:
                    if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
                        VideoNativeHandleMetadata *vnhmd =
                            (VideoNativeHandleMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateNativeHandleInMeta(
                                mCodec->mNode, kPortIndexInput,
                                NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */),
                                bufferID);
                    }
                    break;
                case kMetadataBufferTypeANWBuffer:
                    if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
                        VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateGraphicBufferInMeta(
                                mCodec->mNode, kPortIndexInput,
                                new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */),
                                bufferID);
                    }
                    break;
#endif
                default:
                    ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode",
                            asString(metaType), info->mCodecData->size(),
                            sizeof(buffer_handle_t) * 8);
                    err2 = ERROR_UNSUPPORTED;
                    break;
                }

                if (err2 == OK) {
                    err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        info->mCodecData->size(),
                        flags,
                        timeUs,
                        info->mFenceFd);
                }
                // fence fd ownership passed to emptyBuffer (or consumed above)
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                            mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // no buffer data - signal EOS with an empty buffer
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                            mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                            mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                        mCodec->mComponentName.c_str(), bufferID);

                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        0,
                        OMX_BUFFERFLAG_EOS,
                        0,
                        info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// Finds an input buffer we own and posts a fill request for it; no-op once
// EOS has been queued on the input port.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

// The component produced an output buffer: reclaim ownership, convert or
// annotate the payload, and forward it downstream (or resubmit/free it
// according to the port mode).
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
            mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;
    status_t err= OK;

#if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        // avoid leaking the fence file descriptor on the error path
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return true;
    }

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus = BufferInfo::OWNED_BY_US;

    if (info->mRenderInfo != NULL) {
        // The fence for an emptied buffer must have signaled, but there still could be queued
        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
        // as we will soon requeue this buffer to the surface. While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // empty non-EOS buffers (or anything after output EOS) go straight
            // back to the component
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                        mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) {
                // If potentially rendering onto a surface, always save key format data (crop &
                // data space) so that we can set it if and once the buffer is rendered.
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                // encoder output carries a native handle in metadata rather
                // than the encoded bytes themselves
                native_handle_t *handle = NULL;
                VideoNativeHandleMetadata &nativeMeta =
                    *(VideoNativeHandleMetadata *)info->mData->data();
                if (info->mData->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // handle is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pHandle;
#endif
                }
                info->mData->meta()->setPointer("handle", handle);
                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                info->mData->meta()->setInt32("rangeLength", rangeLength);
            } else if (info->mData == info->mCodecData) {
                info->mData->setRange(rangeOffset, rangeLength);
            } else {
                info->mCodecData->setRange(rangeOffset, rangeLength);
                // in this case we know that mConverter is not null
                status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                        info->mCodecData, info->mData);
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                mCodec->mSkipCutBuffer->submit(info->mData);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setBuffer("buffer", info->mData);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        case FREE_BUFFERS:
            err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return true;
            }
            break;

        default:
            ALOGE("Invalid port mode: %d", mode);
            return false;
    }

    return true;
}

// Downstream is done with an output buffer: reclaim it and, if the reply
// carries render metadata (crop/dataspace), push that to the native window.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    android_native_rect_t crop;
    // only touch the native window when the crop actually changed
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
            && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
        mCodec->mLastNativeWindowCrop = crop;
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    int32_t dataSpace;
    if (msg->findInt32("dataspace", &dataSpace)
            && dataSpace != mCodec->mLastNativeWindowDataSpace) {
        status_t err = native_window_set_buffers_data_space(
                mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
mCodec->mLastNativeWindowDataSpace = dataSpace; 6197 ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err); 6198 } 6199 6200 int32_t render; 6201 if (mCodec->mNativeWindow != NULL 6202 && msg->findInt32("render", &render) && render != 0 6203 && info->mData != NULL && info->mData->size() != 0) { 6204 ATRACE_NAME("render"); 6205 // The client wants this buffer to be rendered. 6206 6207 // save buffers sent to the surface so we can get render time when they return 6208 int64_t mediaTimeUs = -1; 6209 info->mData->meta()->findInt64("timeUs", &mediaTimeUs); 6210 if (mediaTimeUs >= 0) { 6211 mCodec->mRenderTracker.onFrameQueued( 6212 mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd))); 6213 } 6214 6215 int64_t timestampNs = 0; 6216 if (!msg->findInt64("timestampNs", ×tampNs)) { 6217 // use media timestamp if client did not request a specific render timestamp 6218 if (info->mData->meta()->findInt64("timeUs", ×tampNs)) { 6219 ALOGV("using buffer PTS of %lld", (long long)timestampNs); 6220 timestampNs *= 1000; 6221 } 6222 } 6223 6224 status_t err; 6225 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); 6226 ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err); 6227 6228 info->checkReadFence("onOutputBufferDrained before queueBuffer"); 6229 err = mCodec->mNativeWindow->queueBuffer( 6230 mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 6231 info->mFenceFd = -1; 6232 if (err == OK) { 6233 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 6234 } else { 6235 ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err); 6236 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6237 info->mStatus = BufferInfo::OWNED_BY_US; 6238 // keeping read fence as write fence to avoid clobbering 6239 info->mIsReadFence = false; 6240 } 6241 } else { 6242 if (mCodec->mNativeWindow != NULL && 6243 (info->mData == NULL || info->mData->size() != 0)) { 6244 // move read fence into 
write fence to avoid clobbering 6245 info->mIsReadFence = false; 6246 ATRACE_NAME("frame-drop"); 6247 } 6248 info->mStatus = BufferInfo::OWNED_BY_US; 6249 } 6250 6251 PortMode mode = getPortMode(kPortIndexOutput); 6252 6253 switch (mode) { 6254 case KEEP_BUFFERS: 6255 { 6256 // XXX fishy, revisit!!! What about the FREE_BUFFERS case below? 6257 6258 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6259 // We cannot resubmit the buffer we just rendered, dequeue 6260 // the spare instead. 6261 6262 info = mCodec->dequeueBufferFromNativeWindow(); 6263 } 6264 break; 6265 } 6266 6267 case RESUBMIT_BUFFERS: 6268 { 6269 if (!mCodec->mPortEOS[kPortIndexOutput]) { 6270 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6271 // We cannot resubmit the buffer we just rendered, dequeue 6272 // the spare instead. 6273 6274 info = mCodec->dequeueBufferFromNativeWindow(); 6275 } 6276 6277 if (info != NULL) { 6278 ALOGV("[%s] calling fillBuffer %u", 6279 mCodec->mComponentName.c_str(), info->mBufferID); 6280 info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS"); 6281 status_t err = mCodec->mOMX->fillBuffer( 6282 mCodec->mNode, info->mBufferID, info->mFenceFd); 6283 info->mFenceFd = -1; 6284 if (err == OK) { 6285 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6286 } else { 6287 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6288 } 6289 } 6290 } 6291 break; 6292 } 6293 6294 case FREE_BUFFERS: 6295 { 6296 status_t err = mCodec->freeBuffer(kPortIndexOutput, index); 6297 if (err != OK) { 6298 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6299 } 6300 break; 6301 } 6302 6303 default: 6304 ALOGE("Invalid port mode: %d", mode); 6305 return; 6306 } 6307} 6308 6309//////////////////////////////////////////////////////////////////////////////// 6310 6311ACodec::UninitializedState::UninitializedState(ACodec *codec) 6312 : BaseState(codec) { 6313} 6314 6315void ACodec::UninitializedState::stateEntered() { 6316 ALOGV("Now 
uninitialized"); 6317 6318 if (mDeathNotifier != NULL) { 6319 mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier); 6320 mDeathNotifier.clear(); 6321 } 6322 6323 mCodec->mUsingNativeWindow = false; 6324 mCodec->mNativeWindow.clear(); 6325 mCodec->mNativeWindowUsageBits = 0; 6326 mCodec->mNode = 0; 6327 mCodec->mOMX.clear(); 6328 mCodec->mQuirks = 0; 6329 mCodec->mFlags = 0; 6330 mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; 6331 mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; 6332 mCodec->mConverter[0].clear(); 6333 mCodec->mConverter[1].clear(); 6334 mCodec->mComponentName.clear(); 6335} 6336 6337bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 6338 bool handled = false; 6339 6340 switch (msg->what()) { 6341 case ACodec::kWhatSetup: 6342 { 6343 onSetup(msg); 6344 6345 handled = true; 6346 break; 6347 } 6348 6349 case ACodec::kWhatAllocateComponent: 6350 { 6351 onAllocateComponent(msg); 6352 handled = true; 6353 break; 6354 } 6355 6356 case ACodec::kWhatShutdown: 6357 { 6358 int32_t keepComponentAllocated; 6359 CHECK(msg->findInt32( 6360 "keepComponentAllocated", &keepComponentAllocated)); 6361 ALOGW_IF(keepComponentAllocated, 6362 "cannot keep component allocated on shutdown in Uninitialized state"); 6363 6364 sp<AMessage> notify = mCodec->mNotify->dup(); 6365 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6366 notify->post(); 6367 6368 handled = true; 6369 break; 6370 } 6371 6372 case ACodec::kWhatFlush: 6373 { 6374 sp<AMessage> notify = mCodec->mNotify->dup(); 6375 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6376 notify->post(); 6377 6378 handled = true; 6379 break; 6380 } 6381 6382 case ACodec::kWhatReleaseCodecInstance: 6383 { 6384 // nothing to do, as we have already signaled shutdown 6385 handled = true; 6386 break; 6387 } 6388 6389 default: 6390 return BaseState::onMessageReceived(msg); 6391 } 6392 6393 return handled; 6394} 6395 6396void ACodec::UninitializedState::onSetup( 6397 
const sp<AMessage> &msg) { 6398 if (onAllocateComponent(msg) 6399 && mCodec->mLoadedState->onConfigureComponent(msg)) { 6400 mCodec->mLoadedState->onStart(); 6401 } 6402} 6403 6404bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { 6405 ALOGV("onAllocateComponent"); 6406 6407 CHECK(mCodec->mNode == 0); 6408 6409 OMXClient client; 6410 if (client.connect() != OK) { 6411 mCodec->signalError(OMX_ErrorUndefined, NO_INIT); 6412 return false; 6413 } 6414 6415 sp<IOMX> omx = client.interface(); 6416 6417 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec); 6418 6419 Vector<AString> matchingCodecs; 6420 6421 AString mime; 6422 6423 AString componentName; 6424 uint32_t quirks = 0; 6425 int32_t encoder = false; 6426 if (msg->findString("componentName", &componentName)) { 6427 sp<IMediaCodecList> list = MediaCodecList::getInstance(); 6428 if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) { 6429 matchingCodecs.add(componentName); 6430 } 6431 } else { 6432 CHECK(msg->findString("mime", &mime)); 6433 6434 if (!msg->findInt32("encoder", &encoder)) { 6435 encoder = false; 6436 } 6437 6438 MediaCodecList::findMatchingCodecs( 6439 mime.c_str(), 6440 encoder, // createEncoder 6441 0, // flags 6442 &matchingCodecs); 6443 } 6444 6445 sp<CodecObserver> observer = new CodecObserver; 6446 IOMX::node_id node = 0; 6447 6448 status_t err = NAME_NOT_FOUND; 6449 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size(); 6450 ++matchIndex) { 6451 componentName = matchingCodecs[matchIndex]; 6452 quirks = MediaCodecList::getQuirksFor(componentName.c_str()); 6453 6454 pid_t tid = gettid(); 6455 int prevPriority = androidGetThreadPriority(tid); 6456 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); 6457 err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node); 6458 androidSetThreadPriority(tid, prevPriority); 6459 6460 if (err == OK) { 6461 break; 6462 } else { 6463 ALOGW("Allocating component '%s' 
failed, try next one.", componentName.c_str()); 6464 } 6465 6466 node = 0; 6467 } 6468 6469 if (node == 0) { 6470 if (!mime.empty()) { 6471 ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.", 6472 encoder ? "en" : "de", mime.c_str(), err); 6473 } else { 6474 ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err); 6475 } 6476 6477 mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err)); 6478 return false; 6479 } 6480 6481 mDeathNotifier = new DeathNotifier(notify); 6482 if (mCodec->mNodeBinder == NULL || 6483 mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) { 6484 // This was a local binder, if it dies so do we, we won't care 6485 // about any notifications in the afterlife. 6486 mDeathNotifier.clear(); 6487 } 6488 6489 notify = new AMessage(kWhatOMXMessageList, mCodec); 6490 observer->setNotificationMessage(notify); 6491 6492 mCodec->mComponentName = componentName; 6493 mCodec->mRenderTracker.setComponentName(componentName); 6494 mCodec->mFlags = 0; 6495 6496 if (componentName.endsWith(".secure")) { 6497 mCodec->mFlags |= kFlagIsSecure; 6498 mCodec->mFlags |= kFlagIsGrallocUsageProtected; 6499 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 6500 } 6501 6502 mCodec->mQuirks = quirks; 6503 mCodec->mOMX = omx; 6504 mCodec->mNode = node; 6505 6506 { 6507 sp<AMessage> notify = mCodec->mNotify->dup(); 6508 notify->setInt32("what", CodecBase::kWhatComponentAllocated); 6509 notify->setString("componentName", mCodec->mComponentName.c_str()); 6510 notify->post(); 6511 } 6512 6513 mCodec->changeState(mCodec->mLoadedState); 6514 6515 return true; 6516} 6517 6518//////////////////////////////////////////////////////////////////////////////// 6519 6520ACodec::LoadedState::LoadedState(ACodec *codec) 6521 : BaseState(codec) { 6522} 6523 6524void ACodec::LoadedState::stateEntered() { 6525 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); 6526 6527 mCodec->mPortEOS[kPortIndexInput] = 6528 
mCodec->mPortEOS[kPortIndexOutput] = false; 6529 6530 mCodec->mInputEOSResult = OK; 6531 6532 mCodec->mDequeueCounter = 0; 6533 mCodec->mMetadataBuffersToSubmit = 0; 6534 mCodec->mRepeatFrameDelayUs = -1ll; 6535 mCodec->mInputFormat.clear(); 6536 mCodec->mOutputFormat.clear(); 6537 mCodec->mBaseOutputFormat.clear(); 6538 6539 if (mCodec->mShutdownInProgress) { 6540 bool keepComponentAllocated = mCodec->mKeepComponentAllocated; 6541 6542 mCodec->mShutdownInProgress = false; 6543 mCodec->mKeepComponentAllocated = false; 6544 6545 onShutdown(keepComponentAllocated); 6546 } 6547 mCodec->mExplicitShutdown = false; 6548 6549 mCodec->processDeferredMessages(); 6550} 6551 6552void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { 6553 if (!keepComponentAllocated) { 6554 (void)mCodec->mOMX->freeNode(mCodec->mNode); 6555 6556 mCodec->changeState(mCodec->mUninitializedState); 6557 } 6558 6559 if (mCodec->mExplicitShutdown) { 6560 sp<AMessage> notify = mCodec->mNotify->dup(); 6561 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6562 notify->post(); 6563 mCodec->mExplicitShutdown = false; 6564 } 6565} 6566 6567bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { 6568 bool handled = false; 6569 6570 switch (msg->what()) { 6571 case ACodec::kWhatConfigureComponent: 6572 { 6573 onConfigureComponent(msg); 6574 handled = true; 6575 break; 6576 } 6577 6578 case ACodec::kWhatCreateInputSurface: 6579 { 6580 onCreateInputSurface(msg); 6581 handled = true; 6582 break; 6583 } 6584 6585 case ACodec::kWhatSetInputSurface: 6586 { 6587 onSetInputSurface(msg); 6588 handled = true; 6589 break; 6590 } 6591 6592 case ACodec::kWhatStart: 6593 { 6594 onStart(); 6595 handled = true; 6596 break; 6597 } 6598 6599 case ACodec::kWhatShutdown: 6600 { 6601 int32_t keepComponentAllocated; 6602 CHECK(msg->findInt32( 6603 "keepComponentAllocated", &keepComponentAllocated)); 6604 6605 mCodec->mExplicitShutdown = true; 6606 onShutdown(keepComponentAllocated); 6607 
6608 handled = true; 6609 break; 6610 } 6611 6612 case ACodec::kWhatFlush: 6613 { 6614 sp<AMessage> notify = mCodec->mNotify->dup(); 6615 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6616 notify->post(); 6617 6618 handled = true; 6619 break; 6620 } 6621 6622 default: 6623 return BaseState::onMessageReceived(msg); 6624 } 6625 6626 return handled; 6627} 6628 6629bool ACodec::LoadedState::onConfigureComponent( 6630 const sp<AMessage> &msg) { 6631 ALOGV("onConfigureComponent"); 6632 6633 CHECK(mCodec->mNode != 0); 6634 6635 status_t err = OK; 6636 AString mime; 6637 if (!msg->findString("mime", &mime)) { 6638 err = BAD_VALUE; 6639 } else { 6640 err = mCodec->configureCodec(mime.c_str(), msg); 6641 } 6642 if (err != OK) { 6643 ALOGE("[%s] configureCodec returning error %d", 6644 mCodec->mComponentName.c_str(), err); 6645 6646 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6647 return false; 6648 } 6649 6650 { 6651 sp<AMessage> notify = mCodec->mNotify->dup(); 6652 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 6653 notify->setMessage("input-format", mCodec->mInputFormat); 6654 notify->setMessage("output-format", mCodec->mOutputFormat); 6655 notify->post(); 6656 } 6657 6658 return true; 6659} 6660 6661status_t ACodec::LoadedState::setupInputSurface() { 6662 status_t err = OK; 6663 6664 if (mCodec->mRepeatFrameDelayUs > 0ll) { 6665 err = mCodec->mOMX->setInternalOption( 6666 mCodec->mNode, 6667 kPortIndexInput, 6668 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 6669 &mCodec->mRepeatFrameDelayUs, 6670 sizeof(mCodec->mRepeatFrameDelayUs)); 6671 6672 if (err != OK) { 6673 ALOGE("[%s] Unable to configure option to repeat previous " 6674 "frames (err %d)", 6675 mCodec->mComponentName.c_str(), 6676 err); 6677 return err; 6678 } 6679 } 6680 6681 if (mCodec->mMaxPtsGapUs > 0ll) { 6682 err = mCodec->mOMX->setInternalOption( 6683 mCodec->mNode, 6684 kPortIndexInput, 6685 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 6686 
&mCodec->mMaxPtsGapUs, 6687 sizeof(mCodec->mMaxPtsGapUs)); 6688 6689 if (err != OK) { 6690 ALOGE("[%s] Unable to configure max timestamp gap (err %d)", 6691 mCodec->mComponentName.c_str(), 6692 err); 6693 return err; 6694 } 6695 } 6696 6697 if (mCodec->mMaxFps > 0) { 6698 err = mCodec->mOMX->setInternalOption( 6699 mCodec->mNode, 6700 kPortIndexInput, 6701 IOMX::INTERNAL_OPTION_MAX_FPS, 6702 &mCodec->mMaxFps, 6703 sizeof(mCodec->mMaxFps)); 6704 6705 if (err != OK) { 6706 ALOGE("[%s] Unable to configure max fps (err %d)", 6707 mCodec->mComponentName.c_str(), 6708 err); 6709 return err; 6710 } 6711 } 6712 6713 if (mCodec->mTimePerCaptureUs > 0ll 6714 && mCodec->mTimePerFrameUs > 0ll) { 6715 int64_t timeLapse[2]; 6716 timeLapse[0] = mCodec->mTimePerFrameUs; 6717 timeLapse[1] = mCodec->mTimePerCaptureUs; 6718 err = mCodec->mOMX->setInternalOption( 6719 mCodec->mNode, 6720 kPortIndexInput, 6721 IOMX::INTERNAL_OPTION_TIME_LAPSE, 6722 &timeLapse[0], 6723 sizeof(timeLapse)); 6724 6725 if (err != OK) { 6726 ALOGE("[%s] Unable to configure time lapse (err %d)", 6727 mCodec->mComponentName.c_str(), 6728 err); 6729 return err; 6730 } 6731 } 6732 6733 if (mCodec->mCreateInputBuffersSuspended) { 6734 bool suspend = true; 6735 err = mCodec->mOMX->setInternalOption( 6736 mCodec->mNode, 6737 kPortIndexInput, 6738 IOMX::INTERNAL_OPTION_SUSPEND, 6739 &suspend, 6740 sizeof(suspend)); 6741 6742 if (err != OK) { 6743 ALOGE("[%s] Unable to configure option to suspend (err %d)", 6744 mCodec->mComponentName.c_str(), 6745 err); 6746 return err; 6747 } 6748 } 6749 6750 uint32_t usageBits; 6751 if (mCodec->mOMX->getParameter( 6752 mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 6753 &usageBits, sizeof(usageBits)) == OK) { 6754 mCodec->mInputFormat->setInt32( 6755 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 6756 } 6757 6758 sp<ABuffer> colorAspectsBuffer; 6759 if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) { 6760 err 
= mCodec->mOMX->setInternalOption( 6761 mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS, 6762 colorAspectsBuffer->base(), colorAspectsBuffer->capacity()); 6763 if (err != OK) { 6764 ALOGE("[%s] Unable to configure color aspects (err %d)", 6765 mCodec->mComponentName.c_str(), err); 6766 return err; 6767 } 6768 } 6769 return OK; 6770} 6771 6772void ACodec::LoadedState::onCreateInputSurface( 6773 const sp<AMessage> & /* msg */) { 6774 ALOGV("onCreateInputSurface"); 6775 6776 sp<AMessage> notify = mCodec->mNotify->dup(); 6777 notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); 6778 6779 android_dataspace dataSpace; 6780 status_t err = 6781 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6782 notify->setMessage("input-format", mCodec->mInputFormat); 6783 notify->setMessage("output-format", mCodec->mOutputFormat); 6784 6785 sp<IGraphicBufferProducer> bufferProducer; 6786 if (err == OK) { 6787 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6788 err = mCodec->mOMX->createInputSurface( 6789 mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, 6790 &mCodec->mInputMetadataType); 6791 // framework uses ANW buffers internally instead of gralloc handles 6792 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6793 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6794 } 6795 } 6796 6797 if (err == OK) { 6798 err = setupInputSurface(); 6799 } 6800 6801 if (err == OK) { 6802 notify->setObject("input-surface", 6803 new BufferProducerWrapper(bufferProducer)); 6804 } else { 6805 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6806 // the error through because it's in the "configured" state. We 6807 // send a kWhatInputSurfaceCreated with an error value instead. 
6808 ALOGE("[%s] onCreateInputSurface returning error %d", 6809 mCodec->mComponentName.c_str(), err); 6810 notify->setInt32("err", err); 6811 } 6812 notify->post(); 6813} 6814 6815void ACodec::LoadedState::onSetInputSurface( 6816 const sp<AMessage> &msg) { 6817 ALOGV("onSetInputSurface"); 6818 6819 sp<AMessage> notify = mCodec->mNotify->dup(); 6820 notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted); 6821 6822 sp<RefBase> obj; 6823 CHECK(msg->findObject("input-surface", &obj)); 6824 sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get()); 6825 6826 android_dataspace dataSpace; 6827 status_t err = 6828 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6829 notify->setMessage("input-format", mCodec->mInputFormat); 6830 notify->setMessage("output-format", mCodec->mOutputFormat); 6831 6832 if (err == OK) { 6833 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6834 err = mCodec->mOMX->setInputSurface( 6835 mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(), 6836 &mCodec->mInputMetadataType); 6837 // framework uses ANW buffers internally instead of gralloc handles 6838 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6839 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6840 } 6841 } 6842 6843 if (err == OK) { 6844 surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace); 6845 err = setupInputSurface(); 6846 } 6847 6848 if (err != OK) { 6849 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6850 // the error through because it's in the "configured" state. We 6851 // send a kWhatInputSurfaceAccepted with an error value instead. 
6852 ALOGE("[%s] onSetInputSurface returning error %d", 6853 mCodec->mComponentName.c_str(), err); 6854 notify->setInt32("err", err); 6855 } 6856 notify->post(); 6857} 6858 6859void ACodec::LoadedState::onStart() { 6860 ALOGV("onStart"); 6861 6862 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 6863 if (err != OK) { 6864 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6865 } else { 6866 mCodec->changeState(mCodec->mLoadedToIdleState); 6867 } 6868} 6869 6870//////////////////////////////////////////////////////////////////////////////// 6871 6872ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec) 6873 : BaseState(codec) { 6874} 6875 6876void ACodec::LoadedToIdleState::stateEntered() { 6877 ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str()); 6878 6879 status_t err; 6880 if ((err = allocateBuffers()) != OK) { 6881 ALOGE("Failed to allocate buffers after transitioning to IDLE state " 6882 "(error 0x%08x)", 6883 err); 6884 6885 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6886 6887 mCodec->mOMX->sendCommand( 6888 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 6889 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) { 6890 mCodec->freeBuffersOnPort(kPortIndexInput); 6891 } 6892 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) { 6893 mCodec->freeBuffersOnPort(kPortIndexOutput); 6894 } 6895 6896 mCodec->changeState(mCodec->mLoadedState); 6897 } 6898} 6899 6900status_t ACodec::LoadedToIdleState::allocateBuffers() { 6901 status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput); 6902 6903 if (err != OK) { 6904 return err; 6905 } 6906 6907 return mCodec->allocateBuffersOnPort(kPortIndexOutput); 6908} 6909 6910bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { 6911 switch (msg->what()) { 6912 case kWhatSetParameters: 6913 case kWhatShutdown: 6914 { 6915 mCodec->deferMessage(msg); 6916 return true; 6917 } 6918 6919 
case kWhatSignalEndOfInputStream: 6920 { 6921 mCodec->onSignalEndOfInputStream(); 6922 return true; 6923 } 6924 6925 case kWhatResume: 6926 { 6927 // We'll be active soon enough. 6928 return true; 6929 } 6930 6931 case kWhatFlush: 6932 { 6933 // We haven't even started yet, so we're flushed alright... 6934 sp<AMessage> notify = mCodec->mNotify->dup(); 6935 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6936 notify->post(); 6937 return true; 6938 } 6939 6940 default: 6941 return BaseState::onMessageReceived(msg); 6942 } 6943} 6944 6945bool ACodec::LoadedToIdleState::onOMXEvent( 6946 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6947 switch (event) { 6948 case OMX_EventCmdComplete: 6949 { 6950 status_t err = OK; 6951 if (data1 != (OMX_U32)OMX_CommandStateSet 6952 || data2 != (OMX_U32)OMX_StateIdle) { 6953 ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)", 6954 asString((OMX_COMMANDTYPE)data1), data1, 6955 asString((OMX_STATETYPE)data2), data2); 6956 err = FAILED_TRANSACTION; 6957 } 6958 6959 if (err == OK) { 6960 err = mCodec->mOMX->sendCommand( 6961 mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting); 6962 } 6963 6964 if (err != OK) { 6965 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6966 } else { 6967 mCodec->changeState(mCodec->mIdleToExecutingState); 6968 } 6969 6970 return true; 6971 } 6972 6973 default: 6974 return BaseState::onOMXEvent(event, data1, data2); 6975 } 6976} 6977 6978//////////////////////////////////////////////////////////////////////////////// 6979 6980ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec) 6981 : BaseState(codec) { 6982} 6983 6984void ACodec::IdleToExecutingState::stateEntered() { 6985 ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str()); 6986} 6987 6988bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6989 switch (msg->what()) { 6990 case kWhatSetParameters: 6991 case kWhatShutdown: 6992 { 6993 
mCodec->deferMessage(msg); 6994 return true; 6995 } 6996 6997 case kWhatResume: 6998 { 6999 // We'll be active soon enough. 7000 return true; 7001 } 7002 7003 case kWhatFlush: 7004 { 7005 // We haven't even started yet, so we're flushed alright... 7006 sp<AMessage> notify = mCodec->mNotify->dup(); 7007 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7008 notify->post(); 7009 7010 return true; 7011 } 7012 7013 case kWhatSignalEndOfInputStream: 7014 { 7015 mCodec->onSignalEndOfInputStream(); 7016 return true; 7017 } 7018 7019 default: 7020 return BaseState::onMessageReceived(msg); 7021 } 7022} 7023 7024bool ACodec::IdleToExecutingState::onOMXEvent( 7025 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7026 switch (event) { 7027 case OMX_EventCmdComplete: 7028 { 7029 if (data1 != (OMX_U32)OMX_CommandStateSet 7030 || data2 != (OMX_U32)OMX_StateExecuting) { 7031 ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)", 7032 asString((OMX_COMMANDTYPE)data1), data1, 7033 asString((OMX_STATETYPE)data2), data2); 7034 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7035 return true; 7036 } 7037 7038 mCodec->mExecutingState->resume(); 7039 mCodec->changeState(mCodec->mExecutingState); 7040 7041 return true; 7042 } 7043 7044 default: 7045 return BaseState::onOMXEvent(event, data1, data2); 7046 } 7047} 7048 7049//////////////////////////////////////////////////////////////////////////////// 7050 7051ACodec::ExecutingState::ExecutingState(ACodec *codec) 7052 : BaseState(codec), 7053 mActive(false) { 7054} 7055 7056ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( 7057 OMX_U32 /* portIndex */) { 7058 return RESUBMIT_BUFFERS; 7059} 7060 7061void ACodec::ExecutingState::submitOutputMetaBuffers() { 7062 // submit as many buffers as there are input buffers with the codec 7063 // in case we are in port reconfiguring 7064 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 7065 BufferInfo *info = 
&mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Hands every output buffer that we own back to the component via fillBuffer
// so it can produce output into them.  Buffers currently queued with the
// native window are left there.  Any unexpected ownership state or a failed
// fillBuffer call raises a fatal codec error.
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        // The write fence (if any) is passed to the component along with the
        // buffer; we no longer own the fd after this call.
        status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
        info->mFenceFd = -1;
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

// Submits regular output buffers and, when output uses metadata buffers,
// the metadata buffers as well.
void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

// (Re)starts the data flow: posts all output buffers to the component and
// offers every input buffer we own to the client.  No-op if already active.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }

    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    // Reset render tracking and replay any messages deferred while we were
    // transitioning between states.
    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

// Handles client requests valid while executing: shutdown, flush, resume,
// IDR requests, runtime parameter changes and EOS signalling.
bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply (with the error code) only if the caller asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            // Keep submitting output metadata buffers until output EOS is
            // reached, re-posting this message after each submission.
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime (mid-stream) parameters: video bitrate, start time,
// input-frame suspension, sync-frame requests, operating rate and intra
// refresh.  Returns the first fatal error; unsupported optional keys
// (intra-refresh-period) are ignored.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMX->setConfig(
                mNode,
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_START_TIME,
                     &skipFramesBeforeUs,
                     sizeof(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        bool suspend = dropInputFrames != 0;

        status_t err =
            mOMX->setInternalOption(
                     mNode,
                     kPortIndexInput,
                     IOMX::INTERNAL_OPTION_SUSPEND,
                     &suspend,
                     sizeof(suspend));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err = setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    return OK;
}

// Signals input EOS to the component and notifies the client (including the
// error code if signalling failed).
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = mOMX->signalEndOfInputStream(mNode);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Handles OMX events in the Executing state.  A port-settings change on the
// output port either triggers a full port reconfiguration (disable ->
// reallocate -> enable) or, for crop/intra-refresh-only changes, is absorbed
// by the format-change notification alone.
bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                // Buffer geometry changed: disable the output port and
                // reallocate its buffers in OutputPortSettingsChangedState.
                mCodec->mMetadataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode,
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                     mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// While reconfiguring the output port, freed output buffers are released and
// input buffers keep flowing.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

// Defers state-changing client requests until the port reconfiguration
// completes; everything else goes to the base state.
bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Drives the output port reconfiguration:
//   PortDisable complete -> free dealer, re-enable port, reallocate buffers;
//   PortEnable complete  -> resubmit buffers (if active) and return to
//                           Executing.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from executing->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

// During the executing->idle transition only shutdown-related traffic is
// accepted; flush requests are ignored with a warning.
bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

// Waits for the StateSet(Idle) command completion; any other command
// completion is a fatal protocol error.  Port/EOS events are ignored since
// we are shutting down.
bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component is idle and we have reclaimed every buffer, free all
// port buffers, request the Loaded state and move to IdleToLoadedState.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            // Free both ports; report the first failure but still attempt
            // to free the output port.
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            if (err == OK) {
                err = err2;
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display.  This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

// In the idle->loaded transition only shutdown is acknowledged; a flush here
// is a client protocol violation and is logged as an error.
bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGE("Got flush request in IdleToLoadedState");
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::IdleToLoadedState::stateEntered() {
    ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
}

// Completes the shutdown path: when StateSet(Loaded) completes we return to
// the Loaded state; any other command completion is a fatal protocol error.
bool ACodec::IdleToLoadedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateLoaded) {
                ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->changeState(mCodec->mLoadedState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::FlushingState::FlushingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::FlushingState::stateEntered() {
    ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());

    // Each port reports flush completion independently; track both.
    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
}

// Shutdown requests arriving mid-flush are deferred until the flush finishes;
// a second flush is a no-op.
bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            break;
        }

        case kWhatFlush:
        {
            // We're already doing this right now.
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Tracks per-port flush completion.  Components may report completion per
// port (data2 == port index) and/or once for OMX_ALL; we accept both but
// require both ports to be flushed before leaving this state.  Port settings
// changes that race with the flush are deferred and replayed afterwards.
bool ACodec::FlushingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    ALOGV("[%s] FlushingState onOMXEvent(%u,%d)",
            mCodec->mComponentName.c_str(), event, (OMX_S32)data1);

    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandFlush) {
                ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState",
                        asString((OMX_COMMANDTYPE)data1), data1, data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            if (data2 == kPortIndexInput || data2 == kPortIndexOutput) {
                if (mFlushComplete[data2]) {
                    ALOGW("Flush already completed for %s port",
                            data2 == kPortIndexInput ? "input" : "output");
                    return true;
                }
                mFlushComplete[data2] = true;

                if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) {
                    changeStateIfWeOwnAllBuffers();
                }
            } else if (data2 == OMX_ALL) {
                if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) {
                    ALOGW("received flush complete event for OMX_ALL before ports have been"
                            "flushed (%d/%d)",
                            mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]);
                    return false;
                }

                changeStateIfWeOwnAllBuffers();
            } else {
                ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2);
            }

            return true;
        }

        case OMX_EventPortSettingsChanged:
        {
            // Re-pack the event as an OMX message and defer it so it is
            // processed after the flush completes.
            sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec);
            msg->setInt32("type", omx_message::EVENT);
            msg->setInt32("node", mCodec->mNode);
            msg->setInt32("event", event);
            msg->setInt32("data1", data1);
            msg->setInt32("data2", data2);

            ALOGV("[%s] Deferring OMX_EventPortSettingsChanged",
                    mCodec->mComponentName.c_str());

            mCodec->deferMessage(msg);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }

    return true;
}

void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

// When both ports have flushed and all buffers are back in our hands,
// reclaim window-held buffers, reset EOS/render bookkeeping, notify the
// client and return to Executing.
void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
    if (mFlushComplete[kPortIndexInput]
            && mFlushComplete[kPortIndexOutput]
            && mCodec->allYourBuffersAreBelongToUs()) {
        // We now own all buffers except possibly those still queued with
        // the native window for rendering. Let's get those back as well.
        mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();

        mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));

        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatFlushCompleted);
        notify->post();

        mCodec->mPortEOS[kPortIndexInput] =
            mCodec->mPortEOS[kPortIndexOutput] = false;

        mCodec->mInputEOSResult = OK;

        if (mCodec->mSkipCutBuffer != NULL) {
            mCodec->mSkipCutBuffer->clear();
        }

        mCodec->changeState(mCodec->mExecutingState);
    }
}

// Queries a component's capabilities (profiles/levels, color formats and
// feature flags) by instantiating it on a scratch OMX node.  On success
// *caps receives a newly built Capabilities object.  The node and the OMX
// client connection are always released before returning.
status_t ACodec::queryCapabilities(
        const AString &name, const AString &mime, bool isEncoder,
        sp<MediaCodecInfo::Capabilities> *caps) {
    (*caps).clear();
    const char *role = getComponentRole(isEncoder, mime.c_str());
    if (role == NULL) {
        return BAD_VALUE;
    }

    OMXClient client;
    status_t err = client.connect();
    if (err != OK) {
        return err;
    }

    sp<IOMX> omx = client.interface();
    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    err = omx->allocateNode(name.c_str(), observer, NULL, &node);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = setComponentRole(omx, node, role);
    if (err != OK) {
        omx->freeNode(node);
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        // Enumerate supported profile/level pairs until the component runs
        // out or we hit the enumeration cap.
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            OMX_U32 flexibleEquivalent;
            if (isFlexibleColorFormat(
                    omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        if (param.nProfileIndex == 0) {
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        native_handle_t *sidebandHandle = NULL;
        if (omx->configureVideoTunnelMode(
                node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omx->storeMetaDataInBuffers(
                node, kPortIndexOutput, OMX_TRUE) == OK ||
            omx->prepareForAdaptivePlayback(
                node, kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
        if (omx->getConfig(
                node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omx->freeNode(node);
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
8032//static 8033status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 8034 switch (numChannels) { 8035 case 1: 8036 map[0] = OMX_AUDIO_ChannelCF; 8037 break; 8038 case 2: 8039 map[0] = OMX_AUDIO_ChannelLF; 8040 map[1] = OMX_AUDIO_ChannelRF; 8041 break; 8042 case 3: 8043 map[0] = OMX_AUDIO_ChannelLF; 8044 map[1] = OMX_AUDIO_ChannelRF; 8045 map[2] = OMX_AUDIO_ChannelCF; 8046 break; 8047 case 4: 8048 map[0] = OMX_AUDIO_ChannelLF; 8049 map[1] = OMX_AUDIO_ChannelRF; 8050 map[2] = OMX_AUDIO_ChannelLR; 8051 map[3] = OMX_AUDIO_ChannelRR; 8052 break; 8053 case 5: 8054 map[0] = OMX_AUDIO_ChannelLF; 8055 map[1] = OMX_AUDIO_ChannelRF; 8056 map[2] = OMX_AUDIO_ChannelCF; 8057 map[3] = OMX_AUDIO_ChannelLR; 8058 map[4] = OMX_AUDIO_ChannelRR; 8059 break; 8060 case 6: 8061 map[0] = OMX_AUDIO_ChannelLF; 8062 map[1] = OMX_AUDIO_ChannelRF; 8063 map[2] = OMX_AUDIO_ChannelCF; 8064 map[3] = OMX_AUDIO_ChannelLFE; 8065 map[4] = OMX_AUDIO_ChannelLR; 8066 map[5] = OMX_AUDIO_ChannelRR; 8067 break; 8068 case 7: 8069 map[0] = OMX_AUDIO_ChannelLF; 8070 map[1] = OMX_AUDIO_ChannelRF; 8071 map[2] = OMX_AUDIO_ChannelCF; 8072 map[3] = OMX_AUDIO_ChannelLFE; 8073 map[4] = OMX_AUDIO_ChannelLR; 8074 map[5] = OMX_AUDIO_ChannelRR; 8075 map[6] = OMX_AUDIO_ChannelCS; 8076 break; 8077 case 8: 8078 map[0] = OMX_AUDIO_ChannelLF; 8079 map[1] = OMX_AUDIO_ChannelRF; 8080 map[2] = OMX_AUDIO_ChannelCF; 8081 map[3] = OMX_AUDIO_ChannelLFE; 8082 map[4] = OMX_AUDIO_ChannelLR; 8083 map[5] = OMX_AUDIO_ChannelRR; 8084 map[6] = OMX_AUDIO_ChannelLS; 8085 map[7] = OMX_AUDIO_ChannelRS; 8086 break; 8087 default: 8088 return -EINVAL; 8089 } 8090 8091 return OK; 8092} 8093 8094} // namespace android 8095