// ACodec.cpp revision efcafd9117a923d8321d4395b08e69e01d33bb8c
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "ACodec"

#ifdef __LP64__
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif

#include <inttypes.h>
#include <utils/Trace.h>

#include <gui/Surface.h>

#include <media/stagefright/ACodec.h>

#include <binder/MemoryDealer.h>

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>

#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/SurfaceUtils.h>
#include <media/hardware/HardwareAPI.h>

#include <OMX_AudioExt.h>
#include <OMX_VideoExt.h>
#include <OMX_Component.h>
#include <OMX_IndexExt.h>
#include <OMX_AsString.h>

#include "include/avc_utils.h"
#include "include/DataConverter.h"
#include "omx/OMXUtils.h"

namespace android {

enum {
    kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
};

// OMX errors are directly mapped into status_t range if
// there is no corresponding MediaError status code.
// Use the statusFromOMXError(int32_t omxError) function.
//
// Currently this is a direct map.
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors     from 0x90000000 - 0x9000FFFF
// Extension OMX errors  from 0x8F000000 - 0x90000000
// Standard OMX errors   from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
        default:
            // Pass recognized OMX errors through unchanged; anything else
            // collapses to 0 (OK) as there is nothing meaningful to report.
            return isOMXError(omxError) ? omxError : 0; // no translation required
    }
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Ref-counted wrapper around a list of AMessages so a whole batch of OMX
// callbacks can be attached to a single notification message as one object.
struct MessageList : public RefBase {
    MessageList() {
    }
    virtual ~MessageList() {
    }
    std::list<sp<AMessage> > &getList() { return mList; }
private:
    std::list<sp<AMessage> > mList;

    DISALLOW_EVIL_CONSTRUCTORS(MessageList);
};

// Returns a process-wide, lazily-created pass-through DataConverter.
// pthread_once guarantees the converter is constructed exactly once even
// under concurrent first calls.
static sp<DataConverter> getCopyConverter() {
    static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited
    static sp<DataConverter> sCopyConverter;        // zero-inited
    pthread_once(&once, [](){ sCopyConverter = new DataConverter(); });
    return sCopyConverter;
}

// Binder-side observer that repackages incoming OMX callback batches into
// AMessages and posts them (as one MessageList) to the ACodec looper thread.
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessages(const std::list<omx_message> &messages) {
        if (messages.empty()) {
            return;
        }

        sp<AMessage> notify = mNotify->dup();
        bool first = true;
        sp<MessageList> msgList = new MessageList();
        for (std::list<omx_message>::const_iterator it = messages.cbegin();
                it != messages.cend(); ++it) {
            const omx_message &omx_msg = *it;
            if (first) {
                // All messages in one batch come from the same node; record it
                // once on the outer notification.
                notify->setInt32("node", omx_msg.node);
                first = false;
            }

            sp<AMessage> msg = new AMessage;
            msg->setInt32("type", omx_msg.type);
            switch (omx_msg.type) {
                case omx_message::EVENT:
                {
                    msg->setInt32("event", omx_msg.u.event_data.event);
                    msg->setInt32("data1", omx_msg.u.event_data.data1);
                    msg->setInt32("data2", omx_msg.u.event_data.data2);
                    break;
                }

                case omx_message::EMPTY_BUFFER_DONE:
                {
                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                    msg->setInt32("fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FILL_BUFFER_DONE:
                {
                    msg->setInt32(
                            "buffer", omx_msg.u.extended_buffer_data.buffer);
                    msg->setInt32(
                            "range_offset",
                            omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for all ACodec state-machine states; provides default handling
// for OMX callbacks and buffer traffic that concrete states override.
struct ACodec::BaseState : public AState {
    explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    // How buffers returned by the component are treated while in this state.
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts the stored notification when the remote (OMX service) binder dies.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    explicit DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

struct ACodec::UninitializedState : public ACodec::BaseState {
    explicit UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::LoadedState : public ACodec::BaseState {
    explicit LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    explicit LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    explicit IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::ExecutingState : public ACodec::BaseState {
    explicit ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    explicit OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    explicit ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    explicit IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::FlushingState : public ACodec::BaseState {
    explicit FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    // Flush completion is tracked per port: [input, output].
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Records a write fence on this buffer, warning if an unretired fence is
// being overwritten (which would leak the old fence fd).
void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = false;
}

// Records a read fence on this buffer; same overwrite warning as above.
void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = true;
}

// Logs when a pending read fence is about to be consumed as a write fence.
void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
    if (mFenceFd >= 0 && mIsReadFence) {
        ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
    }
}

// Logs when a pending write fence is about to be consumed as a read fence.
void ACodec::BufferInfo::checkReadFence(const char *dbg) {
    if (mFenceFd >= 0 && !mIsReadFence) {
        ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
    }
}

////////////////////////////////////////////////////////////////////////////////

// Constructs the codec in the Uninitialized state with all sub-states
// pre-allocated; actual component setup happens via initiateSetup()/
// initiateAllocateComponent() messages.
ACodec::ACodec()
    : mSampleRate(0),
      mQuirks(0),
      mNode(0),
      mUsingNativeWindow(false),
      mNativeWindowUsageBits(0),
      mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
      mIsVideo(false),
      mIsEncoder(false),
      mFatalError(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mIsLegacyVP9Decoder(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mInputMetadataType(kMetadataBufferTypeInvalid),
      mOutputMetadataType(kMetadataBufferTypeInvalid),
      mLegacyAdaptiveExperiment(false),
      mMetadataBuffersToSubmit(0),
      mNumUndequeuedBuffers(0),
      mRepeatFrameDelayUs(-1ll),
      mMaxPtsGapUs(-1ll),
      mMaxFps(-1),
      mTimePerFrameUs(-1ll),
      mTimePerCaptureUs(-1ll),
      mCreateInputBuffersSuspended(false),
      mTunneled(false),
      mDescribeColorAspectsIndex((OMX_INDEXTYPE)0),
      mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) {
    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));

    changeState(mUninitializedState);
}

ACodec::~ACodec() {
}

// Sets the message used as template for all notifications to the client.
void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}

// The initiate*/signal* methods below are the asynchronous public API:
// each retargets or creates an AMessage and posts it to this codec's looper.
void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(this);
    msg->post();
}

void ACodec::signalSetParameters(const sp<AMessage> &params) {
    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
    msg->setMessage("params", params);
    msg->post();
}

void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent);
    msg->setTarget(this);
    msg->post();
}

void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(this);
    msg->post();
}

// Synchronous surface swap: posts kWhatSetSurface and blocks on the reply.
// Returns the handler's "err" field when the round-trip itself succeeded.
status_t ACodec::setSurface(const sp<Surface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
    msg->setObject("surface", surface);

    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err == OK) {
        (void)response->findInt32("err", &err);
    }
    return err;
}

void ACodec::initiateCreateInputSurface() {
    (new AMessage(kWhatCreateInputSurface, this))->post();
}

void ACodec::initiateSetInputSurface(
        const sp<PersistentSurface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
    msg->setObject("input-surface", surface);
    msg->post();
}

void ACodec::signalEndOfInputStream() {
    (new AMessage(kWhatSignalEndOfInputStream, this))->post();
}

void ACodec::initiateStart() {
    (new AMessage(kWhatStart, this))->post();
}

void ACodec::signalFlush() {
    ALOGV("[%s] signalFlush", mComponentName.c_str());
    (new AMessage(kWhatFlush, this))->post();
}

void ACodec::signalResume() {
    (new AMessage(kWhatResume, this))->post();
}

void ACodec::initiateShutdown(bool keepComponentAllocated) {
    sp<AMessage> msg = new AMessage(kWhatShutdown, this);
    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
    msg->post();
    if (!keepComponentAllocated) {
        // ensure shutdown completes in 3 seconds
        (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
    }
}

void ACodec::signalRequestIDRFrame() {
    (new AMessage(kWhatRequestIDRFrame, this))->post();
}

// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port. For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetadataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
    }
}

// Switches the output to a new Surface mid-stream (or before start).
// Validates that the new surface is compatible (usage bits, undequeued-buffer
// count), then migrates already-allocated buffers onto it via attach/cancel.
// Returns OK on success, or INVALID_OPERATION/BAD_VALUE/driver error.
status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
    // allow keeping unset surface
    if (surface == NULL) {
        if (mNativeWindow != NULL) {
            ALOGW("cannot unset a surface");
            return INVALID_OPERATION;
        }
        return OK;
    }

    // cannot switch from bytebuffers to surface
    if (mNativeWindow == NULL) {
        ALOGW("component was not configured with a surface");
        return INVALID_OPERATION;
    }

    ANativeWindow *nativeWindow = surface.get();
    // if we have not yet started the codec, we can simply set the native window
    if (mBuffers[kPortIndexInput].size() == 0) {
        mNativeWindow = surface;
        return OK;
    }

    // we do not support changing a tunneled surface after start
    if (mTunneled) {
        ALOGW("cannot change tunneled surface");
        return INVALID_OPERATION;
    }

    int usageBits = 0;
    // no need to reconnect as we will not dequeue all buffers
    status_t err = setupNativeWindowSizeFormatAndUsage(
            nativeWindow, &usageBits,
            !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */);
    if (err != OK) {
        return err;
    }

    int ignoredFlags = kVideoGrallocUsage;
    // New output surface is not allowed to add new usage flag except ignored ones.
    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
        return BAD_VALUE;
    }

    // get min undequeued count. We cannot switch to a surface that has a higher
    // undequeued count than we allocated.
    int minUndequeuedBuffers = 0;
    err = nativeWindow->query(
            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &minUndequeuedBuffers);
    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }
    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
                minUndequeuedBuffers, mNumUndequeuedBuffers);
        return BAD_VALUE;
    }

    // we cannot change the number of output buffers while OMX is running
    // set up surface to the same count
    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
    ALOGV("setting up surface for %zu buffers", buffers.size());

    err = native_window_set_buffer_count(nativeWindow, buffers.size());
    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    // need to enable allocation when attaching
    // NOTE(review): return value ignored here but (void)-cast at the matching
    // allowAllocation(false) below — consider checking it for consistency.
    surface->getIGraphicBufferProducer()->allowAllocation(true);

    // for meta data mode, we move dequeued buffers to the new surface.
    // for non-meta mode, we must move all registered buffers
    for (size_t i = 0; i < buffers.size(); ++i) {
        const BufferInfo &info = buffers[i];
        // skip undequeued buffers for meta data mode
        if (storingMetadataInDecodedBuffers()
                && !mLegacyAdaptiveExperiment
                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("skipping buffer");
            continue;
        }
        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());

        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
        if (err != OK) {
            ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
                    info.mGraphicBuffer->getNativeBuffer(),
                    strerror(-err), -err);
            return err;
        }
    }

    // cancel undequeued buffers to new surface
    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            BufferInfo &info = buffers.editItemAt(i);
            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                err = nativeWindow->cancelBuffer(
                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
                // fence ownership transferred to cancelBuffer even on failure
                info.mFenceFd = -1;
                if (err != OK) {
                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
                            info.mGraphicBuffer->getNativeBuffer(),
                            strerror(-err), -err);
                    return err;
                }
            }
        }
        // disallow further allocation
        (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // push blank buffers to previous window if requested
    if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
        pushBlankBuffersToNativeWindow(mNativeWindow.get());
    }

    mNativeWindow = nativeWindow;
    mNativeWindowUsageBits = usageBits;
    return OK;
}

// Allocates all buffers for one port. Output ports backed by a native window
// are delegated to the metadata/native-window paths; otherwise buffers come
// from a single MemoryDealer arena sized for (aligned buffer + optional
// conversion buffer) * count. Notifies the client with kWhatBuffersAllocated.
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    CHECK(mDealer[portIndex] == NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (storingMetadataInDecodedBuffers()) {
            err = allocateOutputMetadataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            MetadataBufferType type =
                portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
            size_t bufSize = def.nBufferSize;
            if (type == kMetadataBufferTypeANWBuffer) {
                bufSize = sizeof(VideoNativeMetadata);
            } else if (type == kMetadataBufferTypeNativeHandleSource) {
                bufSize = sizeof(VideoNativeHandleMetadata);
            }

            // If using gralloc or native source input metadata buffers, allocate largest
            // metadata size as we prefer to generate native source metadata, but component
            // may require gralloc source. For camera source, allocate at least enough
            // size for native metadata buffers.
            size_t allottedSize = bufSize;
            if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) {
                bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
                bufSize = max(bufSize, sizeof(VideoNativeMetadata));
            }

            size_t conversionBufferSize = 0;

            sp<DataConverter> converter = mConverter[portIndex];
            if (converter != NULL) {
                // here we assume sane conversions of max 4:1, so result fits in int32
                if (portIndex == kPortIndexInput) {
                    conversionBufferSize = converter->sourceSize(bufSize);
                } else {
                    conversionBufferSize = converter->targetSize(bufSize);
                }
            }

            size_t alignment = MemoryDealer::getAllocationAlignment();

            ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
                    portIndex == kPortIndexInput ? "input" : "output");

            // verify buffer sizes to avoid overflow in align()
            if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            // don't modify bufSize as OMX may not expect it to increase after negotiation
            size_t alignedSize = align(bufSize, alignment);
            size_t alignedConvSize = align(conversionBufferSize, alignment);
            if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
                if (mem == NULL || mem->pointer() == NULL) {
                    return NO_MEMORY;
                }

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;
                info.mFenceFd = -1;
                info.mRenderInfo = NULL;
                info.mNativeHandle = NULL;

                uint32_t requiresAllocateBufferBit =
                    (portIndex == kPortIndexInput)
                        ? kRequiresAllocateBufferOnInputPorts
                        : kRequiresAllocateBufferOnOutputPorts;

                if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
                    // Secure input: drop the shared-memory backing and let the
                    // component allocate protected memory instead.
                    mem.clear();

                    void *ptr = NULL;
                    sp<NativeHandle> native_handle;
                    err = mOMX->allocateSecureBuffer(
                            mNode, portIndex, bufSize, &info.mBufferID,
                            &ptr, &native_handle);

                    // TRICKY: this representation is unorthodox, but ACodec requires
                    // an ABuffer with a proper size to validate range offsets and lengths.
                    // Since mData is never referenced for secure input, it is used to store
                    // either the pointer to the secure buffer, or the opaque handle as on
                    // some devices ptr is actually an opaque handle, not a pointer.

                    // TRICKY2: use native handle as the base of the ABuffer if received one,
                    // because Widevine source only receives these base addresses.
                    const native_handle_t *native_handle_ptr =
                        native_handle == NULL ? NULL : native_handle->handle();
                    info.mData = new ABuffer(
                            ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize);
                    info.mNativeHandle = native_handle;
                    info.mCodecData = info.mData;
                } else if (mQuirks & requiresAllocateBufferBit) {
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID, allottedSize);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
                }

                if (mem != NULL) {
                    info.mCodecData = new ABuffer(mem->pointer(), bufSize);
                    info.mCodecRef = mem;

                    if (type == kMetadataBufferTypeANWBuffer) {
                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                    }

                    // if we require conversion, allocate conversion buffer for client use;
                    // otherwise, reuse codec buffer
                    if (mConverter[portIndex] != NULL) {
                        CHECK_GT(conversionBufferSize, (size_t)0);
                        mem = mDealer[portIndex]->allocate(conversionBufferSize);
                        if (mem == NULL || mem->pointer() == NULL) {
                            return NO_MEMORY;
                        }
                        info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
                        info.mMemRef = mem;
                    } else {
                        info.mData = info.mCodecData;
                        info.mMemRef = info.mCodecRef;
                    }
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Configures the native window geometry/format/usage from the output port
// definition; combines component-requested gralloc usage with mandatory video
// usage (and protection, if required). Writes combined usage to *finalUsage.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */,
        bool reconnect) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    // NOTE(review): OMX_U32 stored in a signed int for logging — verify no
    // usage flag ever sets the top bit.
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage,
            reconnect);
}

// Negotiates the output buffer count between the component and the native
// window. On success returns (via out-params) the agreed buffer count/size
// and the number of buffers the consumer may hold undequeued.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err =
mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1112 if (err != 0) { 1113 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1114 break; 1115 } 1116 1117 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1118 BufferInfo info; 1119 info.mStatus = BufferInfo::OWNED_BY_US; 1120 info.mFenceFd = fenceFd; 1121 info.mIsReadFence = false; 1122 info.mRenderInfo = NULL; 1123 info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */); 1124 info.mCodecData = info.mData; 1125 info.mGraphicBuffer = graphicBuffer; 1126 mBuffers[kPortIndexOutput].push(info); 1127 1128 IOMX::buffer_id bufferId; 1129 err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer, 1130 &bufferId); 1131 if (err != 0) { 1132 ALOGE("registering GraphicBuffer %u with OMX IL component failed: " 1133 "%d", i, err); 1134 break; 1135 } 1136 1137 mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId; 1138 1139 ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)", 1140 mComponentName.c_str(), 1141 bufferId, graphicBuffer.get()); 1142 } 1143 1144 OMX_U32 cancelStart; 1145 OMX_U32 cancelEnd; 1146 1147 if (err != 0) { 1148 // If an error occurred while dequeuing we need to cancel any buffers 1149 // that were dequeued. 1150 cancelStart = 0; 1151 cancelEnd = mBuffers[kPortIndexOutput].size(); 1152 } else { 1153 // Return the required minimum undequeued buffers to the native window. 
1154 cancelStart = bufferCount - minUndequeuedBuffers; 1155 cancelEnd = bufferCount; 1156 } 1157 1158 for (OMX_U32 i = cancelStart; i < cancelEnd; i++) { 1159 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1160 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1161 status_t error = cancelBufferToNativeWindow(info); 1162 if (err == 0) { 1163 err = error; 1164 } 1165 } 1166 } 1167 1168 if (!storingMetadataInDecodedBuffers()) { 1169 static_cast<Surface*>(mNativeWindow.get()) 1170 ->getIGraphicBufferProducer()->allowAllocation(false); 1171 } 1172 1173 return err; 1174} 1175 1176status_t ACodec::allocateOutputMetadataBuffers() { 1177 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1178 status_t err = configureOutputBuffersFromNativeWindow( 1179 &bufferCount, &bufferSize, &minUndequeuedBuffers, 1180 mLegacyAdaptiveExperiment /* preregister */); 1181 if (err != 0) 1182 return err; 1183 mNumUndequeuedBuffers = minUndequeuedBuffers; 1184 1185 ALOGV("[%s] Allocating %u meta buffers on output port", 1186 mComponentName.c_str(), bufferCount); 1187 1188 size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ? 
1189 sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata); 1190 size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment()); 1191 mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec"); 1192 1193 // Dequeue buffers and send them to OMX 1194 for (OMX_U32 i = 0; i < bufferCount; i++) { 1195 BufferInfo info; 1196 info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1197 info.mFenceFd = -1; 1198 info.mRenderInfo = NULL; 1199 info.mGraphicBuffer = NULL; 1200 info.mDequeuedAt = mDequeueCounter; 1201 1202 sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize); 1203 if (mem == NULL || mem->pointer() == NULL) { 1204 return NO_MEMORY; 1205 } 1206 if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1207 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 1208 } 1209 info.mData = new ABuffer(mem->pointer(), mem->size()); 1210 info.mMemRef = mem; 1211 info.mCodecData = info.mData; 1212 info.mCodecRef = mem; 1213 1214 // we use useBuffer for metadata regardless of quirks 1215 err = mOMX->useBuffer( 1216 mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size()); 1217 mBuffers[kPortIndexOutput].push(info); 1218 1219 ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)", 1220 mComponentName.c_str(), info.mBufferID, mem->pointer()); 1221 } 1222 1223 if (mLegacyAdaptiveExperiment) { 1224 // preallocate and preregister buffers 1225 static_cast<Surface *>(mNativeWindow.get()) 1226 ->getIGraphicBufferProducer()->allowAllocation(true); 1227 1228 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1229 "output port", 1230 mComponentName.c_str(), bufferCount, bufferSize); 1231 1232 // Dequeue buffers then cancel them all 1233 for (OMX_U32 i = 0; i < bufferCount; i++) { 1234 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1235 1236 ANativeWindowBuffer *buf; 1237 int fenceFd; 1238 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1239 if (err != 0) { 
1240 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1241 break; 1242 } 1243 1244 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1245 mOMX->updateGraphicBufferInMeta( 1246 mNode, kPortIndexOutput, graphicBuffer, info->mBufferID); 1247 info->mStatus = BufferInfo::OWNED_BY_US; 1248 info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy"); 1249 info->mGraphicBuffer = graphicBuffer; 1250 } 1251 1252 for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) { 1253 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1254 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1255 status_t error = cancelBufferToNativeWindow(info); 1256 if (err == OK) { 1257 err = error; 1258 } 1259 } 1260 } 1261 1262 static_cast<Surface*>(mNativeWindow.get()) 1263 ->getIGraphicBufferProducer()->allowAllocation(false); 1264 } 1265 1266 mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers; 1267 return err; 1268} 1269 1270status_t ACodec::submitOutputMetadataBuffer() { 1271 CHECK(storingMetadataInDecodedBuffers()); 1272 if (mMetadataBuffersToSubmit == 0) 1273 return OK; 1274 1275 BufferInfo *info = dequeueBufferFromNativeWindow(); 1276 if (info == NULL) { 1277 return ERROR_IO; 1278 } 1279 1280 ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p", 1281 mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get()); 1282 1283 --mMetadataBuffersToSubmit; 1284 info->checkWriteFence("submitOutputMetadataBuffer"); 1285 status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd); 1286 info->mFenceFd = -1; 1287 if (err == OK) { 1288 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 1289 } 1290 1291 return err; 1292} 1293 1294status_t ACodec::waitForFence(int fd, const char *dbg ) { 1295 status_t res = OK; 1296 if (fd >= 0) { 1297 sp<Fence> fence = new Fence(fd); 1298 res = fence->wait(IOMX::kFenceTimeoutMs); 1299 ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg); 1300 } 1301 return res; 1302} 
1303 1304// static 1305const char *ACodec::_asString(BufferInfo::Status s) { 1306 switch (s) { 1307 case BufferInfo::OWNED_BY_US: return "OUR"; 1308 case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT"; 1309 case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM"; 1310 case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM"; 1311 case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE"; 1312 case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED"; 1313 default: return "?"; 1314 } 1315} 1316 1317void ACodec::dumpBuffers(OMX_U32 portIndex) { 1318 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 1319 ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(), 1320 portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size()); 1321 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1322 const BufferInfo &info = mBuffers[portIndex][i]; 1323 ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u", 1324 i, info.mBufferID, info.mGraphicBuffer.get(), 1325 info.mGraphicBuffer == NULL ? 
NULL : info.mGraphicBuffer->getNativeBuffer(), 1326 _asString(info.mStatus), info.mStatus, info.mDequeuedAt); 1327 } 1328} 1329 1330status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { 1331 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 1332 1333 ALOGV("[%s] Calling cancelBuffer on buffer %u", 1334 mComponentName.c_str(), info->mBufferID); 1335 1336 info->checkWriteFence("cancelBufferToNativeWindow"); 1337 int err = mNativeWindow->cancelBuffer( 1338 mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 1339 info->mFenceFd = -1; 1340 1341 ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window", 1342 mComponentName.c_str(), info->mBufferID); 1343 // change ownership even if cancelBuffer fails 1344 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1345 1346 return err; 1347} 1348 1349void ACodec::updateRenderInfoForDequeuedBuffer( 1350 ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) { 1351 1352 info->mRenderInfo = 1353 mRenderTracker.updateInfoForDequeuedBuffer( 1354 buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]); 1355 1356 // check for any fences already signaled 1357 notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo); 1358} 1359 1360void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 1361 if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) { 1362 mRenderTracker.dumpRenderQueue(); 1363 } 1364} 1365 1366void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) { 1367 sp<AMessage> msg = mNotify->dup(); 1368 msg->setInt32("what", CodecBase::kWhatOutputFramesRendered); 1369 std::list<FrameRenderTracker::Info> done = 1370 mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete); 1371 1372 // unlink untracked frames 1373 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin(); 1374 it != done.cend(); ++it) { 1375 ssize_t index = it->getIndex(); 1376 if (index >= 0 && (size_t)index < 
mBuffers[kPortIndexOutput].size()) { 1377 mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL; 1378 } else if (index >= 0) { 1379 // THIS SHOULD NEVER HAPPEN 1380 ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size()); 1381 } 1382 } 1383 1384 if (MediaCodec::CreateFramesRenderedMessage(done, msg)) { 1385 msg->post(); 1386 } 1387} 1388 1389ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { 1390 ANativeWindowBuffer *buf; 1391 CHECK(mNativeWindow.get() != NULL); 1392 1393 if (mTunneled) { 1394 ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel" 1395 " video playback mode mode!"); 1396 return NULL; 1397 } 1398 1399 if (mFatalError) { 1400 ALOGW("not dequeuing from native window due to fatal error"); 1401 return NULL; 1402 } 1403 1404 int fenceFd = -1; 1405 do { 1406 status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1407 if (err != 0) { 1408 ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err); 1409 return NULL; 1410 } 1411 1412 bool stale = false; 1413 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1414 i--; 1415 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1416 1417 if (info->mGraphicBuffer != NULL && 1418 info->mGraphicBuffer->handle == buf->handle) { 1419 // Since consumers can attach buffers to BufferQueues, it is possible 1420 // that a known yet stale buffer can return from a surface that we 1421 // once used. We can simply ignore this as we have already dequeued 1422 // this buffer properly. NOTE: this does not eliminate all cases, 1423 // e.g. it is possible that we have queued the valid buffer to the 1424 // NW, and a stale copy of the same buffer gets dequeued - which will 1425 // be treated as the valid buffer by ACodec. 1426 if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 1427 ALOGI("dequeued stale buffer %p. 
discarding", buf); 1428 stale = true; 1429 break; 1430 } 1431 1432 ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer()); 1433 info->mStatus = BufferInfo::OWNED_BY_US; 1434 info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow"); 1435 updateRenderInfoForDequeuedBuffer(buf, fenceFd, info); 1436 return info; 1437 } 1438 } 1439 1440 // It is also possible to receive a previously unregistered buffer 1441 // in non-meta mode. These should be treated as stale buffers. The 1442 // same is possible in meta mode, in which case, it will be treated 1443 // as a normal buffer, which is not desirable. 1444 // TODO: fix this. 1445 if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) { 1446 ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf); 1447 stale = true; 1448 } 1449 if (stale) { 1450 // TODO: detach stale buffer, but there is no API yet to do it. 1451 buf = NULL; 1452 } 1453 } while (buf == NULL); 1454 1455 // get oldest undequeued buffer 1456 BufferInfo *oldest = NULL; 1457 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1458 i--; 1459 BufferInfo *info = 1460 &mBuffers[kPortIndexOutput].editItemAt(i); 1461 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW && 1462 (oldest == NULL || 1463 // avoid potential issues from counter rolling over 1464 mDequeueCounter - info->mDequeuedAt > 1465 mDequeueCounter - oldest->mDequeuedAt)) { 1466 oldest = info; 1467 } 1468 } 1469 1470 // it is impossible dequeue a buffer when there are no buffers with ANW 1471 CHECK(oldest != NULL); 1472 // it is impossible to dequeue an unknown buffer in non-meta mode, as the 1473 // while loop above does not complete 1474 CHECK(storingMetadataInDecodedBuffers()); 1475 1476 // discard buffer in LRU info and replace with new buffer 1477 oldest->mGraphicBuffer = new GraphicBuffer(buf, false); 1478 oldest->mStatus = BufferInfo::OWNED_BY_US; 1479 oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest"); 1480 
mRenderTracker.untrackFrame(oldest->mRenderInfo); 1481 oldest->mRenderInfo = NULL; 1482 1483 mOMX->updateGraphicBufferInMeta( 1484 mNode, kPortIndexOutput, oldest->mGraphicBuffer, 1485 oldest->mBufferID); 1486 1487 if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) { 1488 VideoGrallocMetadata *grallocMeta = 1489 reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base()); 1490 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1491 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1492 mDequeueCounter - oldest->mDequeuedAt, 1493 (void *)(uintptr_t)grallocMeta->pHandle, 1494 oldest->mGraphicBuffer->handle, oldest->mData->base()); 1495 } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1496 VideoNativeMetadata *nativeMeta = 1497 reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base()); 1498 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1499 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1500 mDequeueCounter - oldest->mDequeuedAt, 1501 (void *)(uintptr_t)nativeMeta->pBuffer, 1502 oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base()); 1503 } 1504 1505 updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest); 1506 return oldest; 1507} 1508 1509status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) { 1510 status_t err = OK; 1511 for (size_t i = mBuffers[portIndex].size(); i > 0;) { 1512 i--; 1513 status_t err2 = freeBuffer(portIndex, i); 1514 if (err == OK) { 1515 err = err2; 1516 } 1517 } 1518 1519 // clear mDealer even on an error 1520 mDealer[portIndex].clear(); 1521 return err; 1522} 1523 1524status_t ACodec::freeOutputBuffersNotOwnedByComponent() { 1525 status_t err = OK; 1526 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1527 i--; 1528 BufferInfo *info = 1529 &mBuffers[kPortIndexOutput].editItemAt(i); 1530 1531 // At this time some buffers may still be with the component 1532 // or being drained. 
1533 if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT && 1534 info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { 1535 status_t err2 = freeBuffer(kPortIndexOutput, i); 1536 if (err == OK) { 1537 err = err2; 1538 } 1539 } 1540 } 1541 1542 return err; 1543} 1544 1545status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) { 1546 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1547 status_t err = OK; 1548 1549 // there should not be any fences in the metadata 1550 MetadataBufferType type = 1551 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 1552 if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL 1553 && info->mData->size() >= sizeof(VideoNativeMetadata)) { 1554 int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd; 1555 if (fenceFd >= 0) { 1556 ALOGW("unreleased fence (%d) in %s metadata buffer %zu", 1557 fenceFd, portIndex == kPortIndexInput ? "input" : "output", i); 1558 } 1559 } 1560 1561 switch (info->mStatus) { 1562 case BufferInfo::OWNED_BY_US: 1563 if (portIndex == kPortIndexOutput && mNativeWindow != NULL) { 1564 (void)cancelBufferToNativeWindow(info); 1565 } 1566 // fall through 1567 1568 case BufferInfo::OWNED_BY_NATIVE_WINDOW: 1569 err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID); 1570 break; 1571 1572 default: 1573 ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus); 1574 err = FAILED_TRANSACTION; 1575 break; 1576 } 1577 1578 if (info->mFenceFd >= 0) { 1579 ::close(info->mFenceFd); 1580 } 1581 1582 if (portIndex == kPortIndexOutput) { 1583 mRenderTracker.untrackFrame(info->mRenderInfo, i); 1584 info->mRenderInfo = NULL; 1585 } 1586 1587 // remove buffer even if mOMX->freeBuffer fails 1588 mBuffers[portIndex].removeAt(i); 1589 return err; 1590} 1591 1592ACodec::BufferInfo *ACodec::findBufferByID( 1593 uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) { 1594 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1595 BufferInfo *info = 
&mBuffers[portIndex].editItemAt(i); 1596 1597 if (info->mBufferID == bufferID) { 1598 if (index != NULL) { 1599 *index = i; 1600 } 1601 return info; 1602 } 1603 } 1604 1605 ALOGE("Could not find buffer with ID %u", bufferID); 1606 return NULL; 1607} 1608 1609status_t ACodec::setComponentRole( 1610 bool isEncoder, const char *mime) { 1611 const char *role = GetComponentRole(isEncoder, mime); 1612 if (role == NULL) { 1613 return BAD_VALUE; 1614 } 1615 status_t err = SetComponentRole(mOMX, mNode, role); 1616 if (err != OK) { 1617 ALOGW("[%s] Failed to set standard component role '%s'.", 1618 mComponentName.c_str(), role); 1619 } 1620 return err; 1621} 1622 1623status_t ACodec::configureCodec( 1624 const char *mime, const sp<AMessage> &msg) { 1625 int32_t encoder; 1626 if (!msg->findInt32("encoder", &encoder)) { 1627 encoder = false; 1628 } 1629 1630 sp<AMessage> inputFormat = new AMessage; 1631 sp<AMessage> outputFormat = new AMessage; 1632 mConfigFormat = msg; 1633 1634 mIsEncoder = encoder; 1635 1636 mInputMetadataType = kMetadataBufferTypeInvalid; 1637 mOutputMetadataType = kMetadataBufferTypeInvalid; 1638 1639 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1640 1641 if (err != OK) { 1642 return err; 1643 } 1644 1645 int32_t bitRate = 0; 1646 // FLAC encoder doesn't need a bitrate, other encoders do 1647 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1648 && !msg->findInt32("bitrate", &bitRate)) { 1649 return INVALID_OPERATION; 1650 } 1651 1652 // propagate bitrate to the output so that the muxer has it 1653 if (encoder && msg->findInt32("bitrate", &bitRate)) { 1654 // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the 1655 // average bitrate. We've been setting both bitrate and max-bitrate to this same value. 
1656 outputFormat->setInt32("bitrate", bitRate); 1657 outputFormat->setInt32("max-bitrate", bitRate); 1658 } 1659 1660 int32_t storeMeta; 1661 if (encoder 1662 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1663 && storeMeta != kMetadataBufferTypeInvalid) { 1664 mInputMetadataType = (MetadataBufferType)storeMeta; 1665 err = mOMX->storeMetaDataInBuffers( 1666 mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1667 if (err != OK) { 1668 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1669 mComponentName.c_str(), err); 1670 1671 return err; 1672 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1673 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1674 // IOMX translates ANWBuffers to gralloc source already. 1675 mInputMetadataType = (MetadataBufferType)storeMeta; 1676 } 1677 1678 uint32_t usageBits; 1679 if (mOMX->getParameter( 1680 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1681 &usageBits, sizeof(usageBits)) == OK) { 1682 inputFormat->setInt32( 1683 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1684 } 1685 } 1686 1687 int32_t prependSPSPPS = 0; 1688 if (encoder 1689 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1690 && prependSPSPPS != 0) { 1691 OMX_INDEXTYPE index; 1692 err = mOMX->getExtensionIndex( 1693 mNode, 1694 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1695 &index); 1696 1697 if (err == OK) { 1698 PrependSPSPPSToIDRFramesParams params; 1699 InitOMXParams(¶ms); 1700 params.bEnable = OMX_TRUE; 1701 1702 err = mOMX->setParameter( 1703 mNode, index, ¶ms, sizeof(params)); 1704 } 1705 1706 if (err != OK) { 1707 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1708 "IDR frames. 
(err %d)", err); 1709 1710 return err; 1711 } 1712 } 1713 1714 // Only enable metadata mode on encoder output if encoder can prepend 1715 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1716 // opaque handle, to which we don't have access. 1717 int32_t video = !strncasecmp(mime, "video/", 6); 1718 mIsVideo = video; 1719 if (encoder && video) { 1720 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1721 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1722 && storeMeta != 0); 1723 1724 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1725 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1726 if (err != OK) { 1727 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1728 mComponentName.c_str(), err); 1729 } 1730 1731 if (!msg->findInt64( 1732 "repeat-previous-frame-after", 1733 &mRepeatFrameDelayUs)) { 1734 mRepeatFrameDelayUs = -1ll; 1735 } 1736 1737 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1738 mMaxPtsGapUs = -1ll; 1739 } 1740 1741 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1742 mMaxFps = -1; 1743 } 1744 1745 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1746 mTimePerCaptureUs = -1ll; 1747 } 1748 1749 if (!msg->findInt32( 1750 "create-input-buffers-suspended", 1751 (int32_t*)&mCreateInputBuffersSuspended)) { 1752 mCreateInputBuffersSuspended = false; 1753 } 1754 } 1755 1756 // NOTE: we only use native window for video decoders 1757 sp<RefBase> obj; 1758 bool haveNativeWindow = msg->findObject("native-window", &obj) 1759 && obj != NULL && video && !encoder; 1760 mUsingNativeWindow = haveNativeWindow; 1761 mLegacyAdaptiveExperiment = false; 1762 if (video && !encoder) { 1763 inputFormat->setInt32("adaptive-playback", false); 1764 1765 int32_t usageProtected; 1766 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1767 if (!haveNativeWindow) { 1768 ALOGE("protected output buffers must be sent to an 
ANativeWindow"); 1769 return PERMISSION_DENIED; 1770 } 1771 mFlags |= kFlagIsGrallocUsageProtected; 1772 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1773 } 1774 1775 if (mFlags & kFlagIsSecure) { 1776 // use native_handles for secure input buffers 1777 err = mOMX->enableNativeBuffers( 1778 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1779 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1780 err = OK; // ignore error for now 1781 } 1782 } 1783 if (haveNativeWindow) { 1784 sp<ANativeWindow> nativeWindow = 1785 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1786 1787 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1788 int32_t autoFrc; 1789 if (msg->findInt32("auto-frc", &autoFrc)) { 1790 bool enabled = autoFrc; 1791 OMX_CONFIG_BOOLEANTYPE config; 1792 InitOMXParams(&config); 1793 config.bEnabled = (OMX_BOOL)enabled; 1794 status_t temp = mOMX->setConfig( 1795 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1796 &config, sizeof(config)); 1797 if (temp == OK) { 1798 outputFormat->setInt32("auto-frc", enabled); 1799 } else if (enabled) { 1800 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1801 } 1802 } 1803 // END of temporary support for automatic FRC 1804 1805 int32_t tunneled; 1806 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1807 tunneled != 0) { 1808 ALOGI("Configuring TUNNELED video playback."); 1809 mTunneled = true; 1810 1811 int32_t audioHwSync = 0; 1812 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1813 ALOGW("No Audio HW Sync provided for video tunnel"); 1814 } 1815 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1816 if (err != OK) { 1817 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1818 audioHwSync, nativeWindow.get()); 1819 return err; 1820 } 1821 1822 int32_t maxWidth = 0, maxHeight = 0; 1823 if (msg->findInt32("max-width", &maxWidth) && 1824 msg->findInt32("max-height", &maxHeight)) { 1825 
1826 err = mOMX->prepareForAdaptivePlayback( 1827 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1828 if (err != OK) { 1829 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1830 mComponentName.c_str(), err); 1831 // allow failure 1832 err = OK; 1833 } else { 1834 inputFormat->setInt32("max-width", maxWidth); 1835 inputFormat->setInt32("max-height", maxHeight); 1836 inputFormat->setInt32("adaptive-playback", true); 1837 } 1838 } 1839 } else { 1840 ALOGV("Configuring CPU controlled video playback."); 1841 mTunneled = false; 1842 1843 // Explicity reset the sideband handle of the window for 1844 // non-tunneled video in case the window was previously used 1845 // for a tunneled video playback. 1846 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1847 if (err != OK) { 1848 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1849 return err; 1850 } 1851 1852 // Always try to enable dynamic output buffers on native surface 1853 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1854 err = mOMX->storeMetaDataInBuffers( 1855 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1856 if (err != OK) { 1857 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1858 mComponentName.c_str(), err); 1859 1860 // if adaptive playback has been requested, try JB fallback 1861 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1862 // LARGE MEMORY REQUIREMENT 1863 1864 // we will not do adaptive playback on software accessed 1865 // surfaces as they never had to respond to changes in the 1866 // crop window, and we don't trust that they will be able to. 
1867 int usageBits = 0; 1868 bool canDoAdaptivePlayback; 1869 1870 if (nativeWindow->query( 1871 nativeWindow.get(), 1872 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1873 &usageBits) != OK) { 1874 canDoAdaptivePlayback = false; 1875 } else { 1876 canDoAdaptivePlayback = 1877 (usageBits & 1878 (GRALLOC_USAGE_SW_READ_MASK | 1879 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1880 } 1881 1882 int32_t maxWidth = 0, maxHeight = 0; 1883 if (canDoAdaptivePlayback && 1884 msg->findInt32("max-width", &maxWidth) && 1885 msg->findInt32("max-height", &maxHeight)) { 1886 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1887 mComponentName.c_str(), maxWidth, maxHeight); 1888 1889 err = mOMX->prepareForAdaptivePlayback( 1890 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1891 maxHeight); 1892 ALOGW_IF(err != OK, 1893 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1894 mComponentName.c_str(), err); 1895 1896 if (err == OK) { 1897 inputFormat->setInt32("max-width", maxWidth); 1898 inputFormat->setInt32("max-height", maxHeight); 1899 inputFormat->setInt32("adaptive-playback", true); 1900 } 1901 } 1902 // allow failure 1903 err = OK; 1904 } else { 1905 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1906 mComponentName.c_str()); 1907 CHECK(storingMetadataInDecodedBuffers()); 1908 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1909 "legacy-adaptive", !msg->contains("no-experiments")); 1910 1911 inputFormat->setInt32("adaptive-playback", true); 1912 } 1913 1914 int32_t push; 1915 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1916 && push != 0) { 1917 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1918 } 1919 } 1920 1921 int32_t rotationDegrees; 1922 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1923 mRotationDegrees = rotationDegrees; 1924 } else { 1925 mRotationDegrees = 0; 1926 } 1927 } 1928 1929 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 1930 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 1931 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 1932 1933 if (video) { 1934 // determine need for software renderer 1935 bool usingSwRenderer = false; 1936 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1937 usingSwRenderer = true; 1938 haveNativeWindow = false; 1939 } 1940 1941 if (encoder) { 1942 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 1943 } else { 1944 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 1945 } 1946 1947 if (err != OK) { 1948 return err; 1949 } 1950 1951 if (haveNativeWindow) { 1952 mNativeWindow = static_cast<Surface *>(obj.get()); 1953 } 1954 1955 // initialize native window now to get actual output format 1956 // TODO: this is needed for some encoders even though they don't use native window 1957 err = initNativeWindow(); 1958 if (err != OK) { 1959 return err; 1960 } 1961 1962 // fallback for devices that do not handle flex-YUV for native buffers 1963 if (haveNativeWindow) { 1964 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 1965 if (msg->findInt32("color-format", &requestedColorFormat) && 1966 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 1967 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 1968 if (err != OK) { 1969 return err; 1970 } 1971 int32_t colorFormat = OMX_COLOR_FormatUnused; 1972 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 1973 if (!outputFormat->findInt32("color-format", &colorFormat)) { 1974 ALOGE("ouptut port did not have a color format (wrong domain?)"); 1975 return BAD_VALUE; 1976 } 1977 ALOGD("[%s] Requested output format %#x and got %#x.", 1978 mComponentName.c_str(), requestedColorFormat, colorFormat); 1979 if (!IsFlexibleColorFormat( 1980 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 1981 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 1982 // device did not handle flex-YUV request for native window, fall back 1983 // to SW renderer 1984 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 1985 mNativeWindow.clear(); 1986 mNativeWindowUsageBits = 0; 1987 haveNativeWindow = false; 1988 usingSwRenderer = true; 1989 if (storingMetadataInDecodedBuffers()) { 1990 err = mOMX->storeMetaDataInBuffers( 1991 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 1992 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 1993 // TODO: implement adaptive-playback support for bytebuffer mode. 1994 // This is done by SW codecs, but most HW codecs don't support it. 1995 inputFormat->setInt32("adaptive-playback", false); 1996 } 1997 if (err == OK) { 1998 err = mOMX->enableNativeBuffers( 1999 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2000 } 2001 if (mFlags & kFlagIsGrallocUsageProtected) { 2002 // fallback is not supported for protected playback 2003 err = PERMISSION_DENIED; 2004 } else if (err == OK) { 2005 err = setupVideoDecoder( 2006 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2007 } 2008 } 2009 } 2010 } 2011 2012 if (usingSwRenderer) { 2013 outputFormat->setInt32("using-sw-renderer", 1); 2014 } 2015 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2016 int32_t numChannels, sampleRate; 2017 if (!msg->findInt32("channel-count", &numChannels) 2018 || !msg->findInt32("sample-rate", &sampleRate)) { 2019 // Since we did not always check for these, leave them optional 2020 // and have the decoder figure it all out. 2021 err = OK; 2022 } else { 2023 err = setupRawAudioFormat( 2024 encoder ? 
kPortIndexInput : kPortIndexOutput, 2025 sampleRate, 2026 numChannels); 2027 } 2028 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2029 int32_t numChannels, sampleRate; 2030 if (!msg->findInt32("channel-count", &numChannels) 2031 || !msg->findInt32("sample-rate", &sampleRate)) { 2032 err = INVALID_OPERATION; 2033 } else { 2034 int32_t isADTS, aacProfile; 2035 int32_t sbrMode; 2036 int32_t maxOutputChannelCount; 2037 int32_t pcmLimiterEnable; 2038 drcParams_t drc; 2039 if (!msg->findInt32("is-adts", &isADTS)) { 2040 isADTS = 0; 2041 } 2042 if (!msg->findInt32("aac-profile", &aacProfile)) { 2043 aacProfile = OMX_AUDIO_AACObjectNull; 2044 } 2045 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2046 sbrMode = -1; 2047 } 2048 2049 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2050 maxOutputChannelCount = -1; 2051 } 2052 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2053 // value is unknown 2054 pcmLimiterEnable = -1; 2055 } 2056 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2057 // value is unknown 2058 drc.encodedTargetLevel = -1; 2059 } 2060 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2061 // value is unknown 2062 drc.drcCut = -1; 2063 } 2064 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2065 // value is unknown 2066 drc.drcBoost = -1; 2067 } 2068 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2069 // value is unknown 2070 drc.heavyCompression = -1; 2071 } 2072 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2073 // value is unknown 2074 drc.targetRefLevel = -1; 2075 } 2076 2077 err = setupAACCodec( 2078 encoder, numChannels, sampleRate, bitRate, aacProfile, 2079 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2080 pcmLimiterEnable); 2081 } 2082 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2083 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2084 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2085 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2086 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2087 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2088 // These are PCM-like formats with a fixed sample rate but 2089 // a variable number of channels. 2090 2091 int32_t numChannels; 2092 if (!msg->findInt32("channel-count", &numChannels)) { 2093 err = INVALID_OPERATION; 2094 } else { 2095 int32_t sampleRate; 2096 if (!msg->findInt32("sample-rate", &sampleRate)) { 2097 sampleRate = 8000; 2098 } 2099 err = setupG711Codec(encoder, sampleRate, numChannels); 2100 } 2101 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2102 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2103 if (encoder && 2104 (!msg->findInt32("channel-count", &numChannels) 2105 || !msg->findInt32("sample-rate", &sampleRate))) { 2106 ALOGE("missing channel count or sample rate for FLAC encoder"); 2107 err = INVALID_OPERATION; 2108 } else { 2109 if (encoder) { 2110 if (!msg->findInt32( 2111 "complexity", &compressionLevel) && 2112 !msg->findInt32( 2113 "flac-compression-level", &compressionLevel)) { 2114 compressionLevel = 5; // default FLAC compression level 2115 } else if (compressionLevel < 0) { 2116 ALOGW("compression level %d outside [0..8] range, " 2117 "using 0", 2118 compressionLevel); 2119 compressionLevel = 0; 2120 } else if (compressionLevel > 8) { 2121 ALOGW("compression level %d outside [0..8] range, " 2122 "using 8", 2123 compressionLevel); 2124 compressionLevel = 8; 2125 } 2126 } 2127 err = setupFlacCodec( 2128 encoder, numChannels, sampleRate, compressionLevel); 2129 } 2130 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2131 int32_t numChannels, sampleRate; 2132 if (encoder 2133 || !msg->findInt32("channel-count", &numChannels) 2134 || !msg->findInt32("sample-rate", &sampleRate)) { 2135 err = INVALID_OPERATION; 2136 } else { 2137 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2138 } 2139 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2140 int32_t numChannels; 2141 int32_t sampleRate; 2142 if (!msg->findInt32("channel-count", &numChannels) 2143 || !msg->findInt32("sample-rate", &sampleRate)) { 2144 err = INVALID_OPERATION; 2145 } else { 2146 err = setupAC3Codec(encoder, numChannels, sampleRate); 2147 } 2148 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2149 int32_t numChannels; 2150 int32_t sampleRate; 2151 if (!msg->findInt32("channel-count", &numChannels) 2152 || !msg->findInt32("sample-rate", &sampleRate)) { 2153 err = INVALID_OPERATION; 2154 } else { 2155 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2156 } 2157 } 2158 2159 if (err != OK) { 2160 return err; 2161 } 2162 2163 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2164 mEncoderDelay = 0; 2165 } 2166 2167 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2168 mEncoderPadding = 0; 2169 } 2170 2171 if (msg->findInt32("channel-mask", &mChannelMask)) { 2172 mChannelMaskPresent = true; 2173 } else { 2174 mChannelMaskPresent = false; 2175 } 2176 2177 int32_t maxInputSize; 2178 if (msg->findInt32("max-input-size", &maxInputSize)) { 2179 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2180 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2181 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2182 } 2183 2184 int32_t priority; 2185 if (msg->findInt32("priority", &priority)) { 2186 err = setPriority(priority); 2187 } 2188 2189 int32_t rateInt = -1; 2190 float rateFloat = -1; 2191 if (!msg->findFloat("operating-rate", &rateFloat)) { 2192 msg->findInt32("operating-rate", &rateInt); 2193 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
    }
    // Best-effort: only forward a positive operating rate to the component.
    if (rateFloat > 0) {
        err = setOperatingRate(rateFloat, video);
    }

    // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
    mBaseOutputFormat = outputFormat;
    // trigger a kWhatOutputFormatChanged msg on first buffer
    mLastOutputFormat.clear();

    // Read back the formats the component actually settled on; publish them
    // to mInputFormat/mOutputFormat only if both port reads succeed.
    err = getPortFormat(kPortIndexInput, inputFormat);
    if (err == OK) {
        err = getPortFormat(kPortIndexOutput, outputFormat);
        if (err == OK) {
            mInputFormat = inputFormat;
            mOutputFormat = outputFormat;
        }
    }

    // create data converters if needed
    // For audio: if the app-requested PCM encoding (pcmEncoding) differs from
    // what the codec reports (codecPcmEncoding), insert an AudioConverter on
    // the app-facing port and advertise the app's encoding in that format.
    if (!video && err == OK) {
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        if (encoder) {
            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
            if (mConverter[kPortIndexInput] != NULL) {
                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        } else {
            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
            if (mConverter[kPortIndexOutput] != NULL) {
                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        }
    }

    return err;
}

// Best-effort: forwards |priority| to the component via the Android
// OMX_IndexConfigPriority extension. Negative priorities are rejected;
// a component that does not support the config is only logged, and OK is
// returned either way so configuration can proceed.
status_t ACodec::setPriority(int32_t priority) {
    if (priority < 0) {
        return BAD_VALUE;
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = (OMX_U32)priority;
    status_t temp = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
            &config, sizeof(config));
    if (temp != OK) {
        ALOGI("codec does not support config priority (err %d)", temp);
    }
    return OK;
}

// Best-effort: communicates the desired operating rate to the component via
// OMX_IndexConfigOperatingRate. Video rates are converted to Q16 fixed point
// (hence the 65535 cap before the x65536 scale); audio rates are passed as
// integer Hz. As with setPriority, an unsupported config is only logged and
// OK is returned.
status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
    if (rateFloat < 0) {
        return BAD_VALUE;
    }
    OMX_U32 rate;
    if (isVideo) {
        if (rateFloat > 65535) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
    } else {
        if (rateFloat > UINT_MAX) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat);
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = rate;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
            &config, sizeof(config));
    if (err != OK) {
        ALOGI("codec does not support config operating rate (err %d)", err);
    }
    return OK;
}

// Reports the encoder's intra-refresh period (in frames) through
// |intraRefreshPeriod|. Tries the Android vendor config first; on failure it
// falls back to the standard OMX cyclic intra-refresh parameter below and
// derives a period from the frame's macroblock count.
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        // No cyclic intra refresh reported: treat as "disabled" (period 0).
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    // period (frames) = total MBs per frame / MBs refreshed per frame
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);

    return OK;
}

// Sets the encoder's intra-refresh period (in frames, 0 = disabled).
// Prefers the Android vendor *config*; if that fails and we are still in the
// configure state, falls back to the standard OMX cyclic intra-refresh
// *parameter* (parameters may only be set while configuring, hence the
// |inConfigure| gate).
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
    if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        // MBs per refresh cycle = total MBs per frame / desired period
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}

// Ensures the given port's buffer size is at least |size| bytes. Reads the
// port definition, raises nBufferSize if it is too small, then reads it back
// to verify the component actually accepted the larger size.
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize >= size) {
        // Already large enough; nothing to do.
        return OK;
    }

    def.nBufferSize = size;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // Read back to confirm: some components silently ignore the request.
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize < size) {
        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
        return FAILED_TRANSACTION;
    }

    return OK;
}

// Enumerates the port's supported audio formats (up to kMaxIndicesToCheck
// entries) looking for |desiredFormat|; if found, selects it via
// setParameter. Returns ERROR_UNSUPPORTED when the limit is reached without
// a match.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eEncoding), format.eEncoding);
            return ERROR_UNSUPPORTED;
        }
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}

// Configures AAC encode or decode. For encoders, sets up the output port as
// AAC with profile/bitrate/SBR tools; for decoders, sets channel count,
// sample rate and stream format (ADTS vs MP4FF) on the input port, plus the
// optional Android "presentation" (DRC/limiter) parameters. ADTS output from
// an encoder is not supported.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        return -EINVAL;
    }

    // PCM travels on the non-AAC side: input port for encode, output for decode.
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // sbrMode comes from the "aac-sbr-mode" format key; -1 means "unset".
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // Decoder path: configure AAC parameters on the input port.
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // DRC/limiter fields use -1 as "unknown/unset" (filled in by the caller).
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    mSampleRate = sampleRate;
    return res;
}

// Configures AC3 decode (encode is rejected): raw PCM on the output port,
// AC3 channel count / sample rate on the input port.
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("AC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));
}

// Configures E-AC3 decode (encode is rejected); mirrors setupAC3Codec with
// the E-AC3 extension index.
status_t ACodec::setupEAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("EAC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));
}

// Maps a requested bitrate (bps) to the smallest AMR band mode whose nominal
// bitrate covers it, for AMR-WB or AMR-NB respectively.
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
        bool isAMRWB, int32_t bps) {
    if (isAMRWB) {
        if (bps <= 6600) {
            return OMX_AUDIO_AMRBandModeWB0;
        } else if (bps <= 8850) {
            return OMX_AUDIO_AMRBandModeWB1;
        } else if (bps <= 12650) {
            return OMX_AUDIO_AMRBandModeWB2;
        } else if (bps <= 14250) {
            return OMX_AUDIO_AMRBandModeWB3;
        } else if (bps <= 15850) {
            return OMX_AUDIO_AMRBandModeWB4;
        } else if (bps <= 18250) {
            return OMX_AUDIO_AMRBandModeWB5;
        } else if (bps <= 19850) {
            return OMX_AUDIO_AMRBandModeWB6;
        } else if (bps <= 23050) {
            return OMX_AUDIO_AMRBandModeWB7;
        }

        // 23850 bps
        return OMX_AUDIO_AMRBandModeWB8;
    } else {  // AMRNB
        if (bps <= 4750) {
            return OMX_AUDIO_AMRBandModeNB0;
        } else if (bps <= 5150) {
            return OMX_AUDIO_AMRBandModeNB1;
        } else if (bps <= 5900) {
            return OMX_AUDIO_AMRBandModeNB2;
        } else if (bps <= 6700) {
            return OMX_AUDIO_AMRBandModeNB3;
        } else if (bps <= 7400) {
            return OMX_AUDIO_AMRBandModeNB4;
        } else if (bps <= 7950) {
            return OMX_AUDIO_AMRBandModeNB5;
        } else if (bps <= 10200) {
            return OMX_AUDIO_AMRBandModeNB6;
        }

        // 12200 bps
        return OMX_AUDIO_AMRBandModeNB7;
    }
}

// Configures AMR-NB or AMR-WB: FSF frame format and a band mode derived from
// |bitrate|, then mono raw PCM on the PCM-side port (16 kHz for WB, 8 kHz
// for NB).
status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
    OMX_AUDIO_PARAM_AMRTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;

    status_t err =
        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            isWAMR ? 16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// Configures G.711 decode (encode is rejected): raw PCM on the input port.
status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
    if (encoder) {
        return INVALID_OPERATION;
    }

    return setupRawAudioFormat(
            kPortIndexInput, sampleRate, numChannels);
}

// Configures FLAC: for encoders, sets the compression level (already
// clamped to [0..8] by the caller) on the output port; then raw PCM on the
// PCM-side port.
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures the given port for raw PCM: sets the port's encoding to PCM,
// then fills OMX_AUDIO_PARAM_PCMMODETYPE (channels, rate, bit depth /
// numerical type per |encoding|). If the codec rejects a non-16-bit
// encoding, retries with 16-bit; callers verify the result via readback.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // note: OMX_ErrorNone is 0 (== OK) — an unmappable channel count is
        // silently accepted rather than treated as a configuration failure.
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err != OK && encoding != kAudioEncodingPcm16bit) {
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Enables tunneled playback on the output port (synced to |audioHwSync|) and
// attaches the resulting sideband handle to the native window.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Enumerates the port's video formats looking for the requested
// compression/color pair and selects the first match. A requested flexible
// color format is substituted by the codec's concrete equivalent; TI's
// encoder gets a legacy workaround where only one of the two fields is
// trusted per port.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && IsFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
            && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
//
// Suggested preference order:
// - optimal format for texture rendering (mediaplayer behavior)
// - optimal SW readable & texture renderable format (flex-YUV support)
// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
// - legacy "usable" standard formats
//
// For legacy support, we prefer a standard format, but will settle for a SW readable
// flex-YUV format.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            return OMX_ErrorBadParameter;
        }
        if (!getLegacyFlexibleFormat) {
            // take the component's first (most preferred) format as-is
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && IsFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}

// Bidirectional mime <-> OMX video coding type table used by the two lookup
// helpers below.
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
    { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision },
};

// Looks up the OMX video coding type for |mime| (case-insensitive).
// On failure sets *codingType to OMX_VIDEO_CodingUnused and returns
// ERROR_UNSUPPORTED.
static status_t GetVideoCodingTypeFromMime(
        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
            return OK;
        }
    }

    *codingType = OMX_VIDEO_CodingUnused;

    return ERROR_UNSUPPORTED;
}

// Reverse lookup: mime type for an OMX video coding type. On failure clears
// *mime and returns ERROR_UNSUPPORTED.
static status_t GetMimeTypeForVideoCoding(
        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
            *mime = kVideoCodingMapEntry[i].mMime;
            return OK;
        }
    }

    mime->clear();

    return ERROR_UNSUPPORTED;
}

status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
        bool usingSwRenderer,
sp<AMessage> &outputFormat) { 3062 int32_t width, height; 3063 if (!msg->findInt32("width", &width) 3064 || !msg->findInt32("height", &height)) { 3065 return INVALID_OPERATION; 3066 } 3067 3068 OMX_VIDEO_CODINGTYPE compressionFormat; 3069 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3070 3071 if (err != OK) { 3072 return err; 3073 } 3074 3075 if (compressionFormat == OMX_VIDEO_CodingVP9) { 3076 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 3077 InitOMXParams(¶ms); 3078 params.nPortIndex = kPortIndexInput; 3079 // Check if VP9 decoder advertises supported profiles. 3080 params.nProfileIndex = 0; 3081 status_t err = mOMX->getParameter( 3082 mNode, 3083 OMX_IndexParamVideoProfileLevelQuerySupported, 3084 ¶ms, 3085 sizeof(params)); 3086 mIsLegacyVP9Decoder = err != OK; 3087 } 3088 3089 err = setVideoPortFormatType( 3090 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 3091 3092 if (err != OK) { 3093 return err; 3094 } 3095 3096 int32_t tmp; 3097 if (msg->findInt32("color-format", &tmp)) { 3098 OMX_COLOR_FORMATTYPE colorFormat = 3099 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3100 err = setVideoPortFormatType( 3101 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3102 if (err != OK) { 3103 ALOGW("[%s] does not support color format %d", 3104 mComponentName.c_str(), colorFormat); 3105 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3106 } 3107 } else { 3108 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3109 } 3110 3111 if (err != OK) { 3112 return err; 3113 } 3114 3115 int32_t frameRateInt; 3116 float frameRateFloat; 3117 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 3118 if (!msg->findInt32("frame-rate", &frameRateInt)) { 3119 frameRateInt = -1; 3120 } 3121 frameRateFloat = (float)frameRateInt; 3122 } 3123 3124 err = setVideoFormatOnPort( 3125 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 3126 3127 if (err != OK) { 3128 
return err; 3129 } 3130 3131 err = setVideoFormatOnPort( 3132 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 3133 3134 if (err != OK) { 3135 return err; 3136 } 3137 3138 err = setColorAspectsForVideoDecoder( 3139 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat); 3140 if (err == ERROR_UNSUPPORTED) { // support is optional 3141 err = OK; 3142 } 3143 3144 if (err != OK) { 3145 return err; 3146 } 3147 3148 err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat); 3149 if (err == ERROR_UNSUPPORTED) { // support is optional 3150 err = OK; 3151 } 3152 return err; 3153} 3154 3155status_t ACodec::initDescribeColorAspectsIndex() { 3156 status_t err = mOMX->getExtensionIndex( 3157 mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex); 3158 if (err != OK) { 3159 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0; 3160 } 3161 return err; 3162} 3163 3164status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) { 3165 status_t err = ERROR_UNSUPPORTED; 3166 if (mDescribeColorAspectsIndex) { 3167 err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3168 } 3169 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3170 mComponentName.c_str(), 3171 params.sAspects.mRange, asString(params.sAspects.mRange), 3172 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3173 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3174 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3175 err, asString(err)); 3176 3177 if (verify && err == OK) { 3178 err = getCodecColorAspects(params); 3179 } 3180 3181 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3182 "[%s] setting color aspects failed even though codec advertises support", 3183 mComponentName.c_str()); 3184 return err; 3185} 3186 3187status_t ACodec::setColorAspectsForVideoDecoder( 3188 int32_t width, int32_t height, bool 
        usingNativeWindow,
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (usingNativeWindow) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
        // The default aspects will be set back to the output format during the
        // getFormat phase of configure(). Set non-Unspecified values back into the
        // format, in case component does not support this enumeration.
        setColorAspectsIntoFormat(params.sAspects, outputFormat);
    }

    (void)initDescribeColorAspectsIndex();

    // communicate color aspects to codec
    return setCodecColorAspects(params);
}

// Reads the codec's current color aspects (and, when bRequestingDataSpace is
// set, its suggested dataspace) through the describeColorAspects extension.
// Returns ERROR_UNSUPPORTED when the extension index is not available.
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    // Only warn for a plain aspect query; dataspace requests and
    // dataspace-changed notifications are allowed to be unsupported.
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}

// Queries the encoder's input-port color aspects and, on success, forces them
// into |format|.
status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    status_t err = getCodecColorAspects(params);
    if (err == OK) {
        // we only set encoder input aspects if codec supports them
        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
    }
    return err;
}

// Determines the dataspace matching |params|: first by asking the codec for
// guidance (when |tryCodec|), then by deriving a (possibly legacy) dataspace
// from the color aspects themselves.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
          "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}


// Negotiates decoder output color aspects with the codec, writes the result
// into |outputFormat|, and (when |dataSpace| is non-null) also resolves the
// matching dataspace.
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}

// initial video encoder setup for bytebuffer mode
status_t ACodec::setColorAspectsForVideoEncoder(
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    // copy config to output format as this is not exposed via getFormat
    copyColorConfig(configFormat, outputFormat);

    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    getColorAspectsFromFormat(configFormat, params.sAspects);

    (void)initDescribeColorAspectsIndex();

    int32_t usingRecorder;
    if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
        android_dataspace dataSpace = HAL_DATASPACE_BT709;
        int32_t width, height;
        if (configFormat->findInt32("width", &width)
                && configFormat->findInt32("height", &height)) {
            setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
            status_t err = getDataSpace(
                    params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
            if (err != OK) {
                return err;
            }
            setColorAspectsIntoFormat(params.sAspects, outputFormat);
        }
        inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
    }

    // communicate color aspects to codec, but do not allow change of the platform aspects
    ColorAspects origAspects = params.sAspects;
    for (int triesLeft = 2; --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK
                || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
                        params.sAspects, origAspects, true /* usePlatformAspects */)) {
            return err;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }
    return OK;
}

// Pushes HDR static metadata (taken from |configFormat|) to the codec port
// and mirrors it into |outputFormat|.
status_t ACodec::setHDRStaticInfoForVideoCodec(
        OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    HDRStaticInfo *info = &params.sInfo;
    if (getHDRStaticInfoFromFormat(configFormat, info)) {
        setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
    }

    (void)initDescribeHDRStaticInfoIndex();

    // communicate HDR static Info to codec
    return setHDRStaticInfo(params);
}

// subsequent initial video encoder setup for surface mode
status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
        android_dataspace *dataSpace /* nonnull */) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    ColorAspects &aspects = params.sAspects;

    // reset default format and store resulting format into both input and output formats
    getColorAspectsFromFormat(mConfigFormat, aspects);
    int32_t width, height;
    if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) {
        setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
    }
    setColorAspectsIntoFormat(aspects, mInputFormat);
    setColorAspectsIntoFormat(aspects, mOutputFormat);

    // communicate color aspects to codec, but do not allow any change
    ColorAspects origAspects = aspects;
    status_t err = OK;
    for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) {
        // NOTE(review): this inner 'err' shadows the outer one, so a failure
        // here only stops the retry loop and is not seen by the code below --
        // confirm that is intended (cf. setColorAspectsForVideoEncoder, which
        // returns the inner error).
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) {
            break;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }

    *dataSpace = HAL_DATASPACE_BT709;
    aspects = origAspects; // restore desired color aspects
    status_t res = getDataSpace(
            params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */);
    if (err == OK) {
        err = res;
    }
    mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
    mInputFormat->setBuffer(
            "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));

    // update input format with codec supported color aspects (basically set unsupported
    // aspects to Unspecified)
    if (err == OK) {
        (void)getInputColorAspectsForVideoEncoder(mInputFormat);
    }

    ALOGV("set default color aspects, updated input format to %s, output format to %s",
            mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());

    return err;
}

// Reads the codec's HDR static metadata for |portIndex| and, on success,
// stores it into |format|.
status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    status_t err = getHDRStaticInfo(params);
    if (err == OK) {
        // we only set decodec output HDRStaticInfo if codec supports them
        setHDRStaticInfoIntoFormat(params.sInfo, format);
    }
    return err;
}

// Caches the vendor extension index used to exchange HDR static metadata;
// resets it to 0 (invalid) when the extension is unsupported.
status_t ACodec::initDescribeHDRStaticInfoIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
    if (err != OK) {
        mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes HDR static metadata to the codec via the describeHDRStaticInfo
// extension. Returns ERROR_UNSUPPORTED when the extension index is not set.
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    const HDRStaticInfo *info = &params.sInfo;
    ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, "
            "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
            mComponentName.c_str(),
            info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y,
            info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
            info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
            info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] setting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Reads HDR static metadata from the codec. Returns ERROR_UNSUPPORTED when
// the extension index is not set.
status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] getting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Configures this node as a video encoder for |mime|: input-port color
// format, dimensions, stride and frame rate; output-port compression format
// and bitrate; then codec-specific (MPEG4/H263/AVC/HEVC/VPX) parameters,
// color aspects and HDR static metadata.
status_t ACodec::setupVideoEncoder(
        const char *mime, const sp<AMessage> &msg,
        sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    int32_t tmp;
    if (!msg->findInt32("color-format", &tmp)) {
        return INVALID_OPERATION;
    }

    OMX_COLOR_FORMATTYPE colorFormat =
        static_cast<OMX_COLOR_FORMATTYPE>(tmp);

    status_t err = setVideoPortFormatType(
            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);

    if (err != OK) {
        ALOGE("[%s] does not support color format %d",
              mComponentName.c_str(), colorFormat);

        return err;
    }

    /* Input port configuration */

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    int32_t width, height, bitrate;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)
            || !msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    // stride and slice-height default to width/height when not given.
    int32_t stride;
    if (!msg->findInt32("stride", &stride)) {
        stride = width;
    }

    video_def->nStride = stride;

    int32_t sliceHeight;
    if (!msg->findInt32("slice-height", &sliceHeight)) {
        sliceHeight = height;
    }

    video_def->nSliceHeight = sliceHeight;

    // 1.5 bytes per pixel (YUV 4:2:0 layout assumed by this sizing).
    def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
        // NOTE(review): mTimePerFrameUs is only updated on the int32
        // "frame-rate" path; a float "frame-rate" leaves it untouched.
        // Looks unintentional -- confirm before relying on mTimePerFrameUs.
        mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
    }

    // OMX frame rates are Q16 fixed point.
    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
    // this is redundant as it was already set up in setVideoPortFormatType
    // FIXME for now skip this only for flexible YUV formats
    if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
        video_def->eColorFormat = colorFormat;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set input port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    /* Output port configuration */

    OMX_VIDEO_CODINGTYPE compressionFormat;
    err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        ALOGE("[%s] does not support compression format %d",
             mComponentName.c_str(), compressionFormat);

        return err;
    }

    def.nPortIndex = kPortIndexOutput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;
    video_def->xFramerate = 0;
    video_def->nBitrate = bitrate;
    video_def->eCompressionFormat = compressionFormat;
    video_def->eColorFormat = OMX_COLOR_FormatUnused;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set output port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    int32_t intraRefreshPeriod = 0;
    if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod >= 0) {
        err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    switch (compressionFormat) {
        case OMX_VIDEO_CodingMPEG4:
            err = setupMPEG4EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingH263:
            err = setupH263EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingAVC:
            err = setupAVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingHEVC:
            err = setupHEVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            err = setupVPXEncoderParameters(msg);
            break;

        default:
            break;
    }

    // Set up color aspects on input, but propagate them to the output format, as they will
    // not be read back from encoder.
    // NOTE(review): 'err' from the setup*EncoderParameters call above is
    // overwritten here without being checked -- confirm this is intended.
    err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
    if (err == ERROR_UNSUPPORTED) {
        ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err == OK) {
        ALOGI("setupVideoEncoder succeeded");
    }

    return err;
}

// Configures cyclic and/or adaptive intra macroblock refresh on the output
// port. |mode| is an OMX_VIDEO_INTRAREFRESHTYPE value; |msg| must supply the
// MB counts (and, for AIR, the refresh count) required by that mode.
status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nCirMBs = mbs;
    }

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nAirMBs = mbs;

        int32_t ref;
        if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
            return INVALID_OPERATION;
        }
        params.nAirRef = ref;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoIntraRefresh,
            &params, sizeof(params));
    return err;
}

// Converts an I-frame interval (seconds) into the number of P frames between
// I frames. A negative interval means "no periodic I frames" (0xFFFFFFFF);
// zero means all-I.
static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) {
    if (iFramesInterval < 0) {
        return 0xFFFFFFFF;
    } else if (iFramesInterval == 0) {
        return 0;
    }
    OMX_U32 ret = frameRate * iFramesInterval;
    return ret;
}

// Reads "bitrate-mode" from |msg|; defaults to variable bitrate when absent.
static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
    int32_t tmp;
    if (!msg->findInt32("bitrate-mode", &tmp)) {
        return OMX_Video_ControlRateVariable;
    }

    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
}

// MPEG-4-specific encoder parameters: profile/level, I/P frame spacing,
// bitrate and error correction.
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate",
            &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as float or int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    // NOTE(review): frameRate (float) is narrowed to setPFramesSpacing's
    // int32_t parameter here -- fractional frame rates are truncated.
    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    if (mpeg4type.nPFrames == 0) {
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nBFrames = 0;
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        // "level" is mandatory once a profile is given.
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// H.263-specific encoder parameters: profile/level, I/P frame spacing,
// bitrate and error correction.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as float or int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    if (h263type.nPFrames == 0) {
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    h263type.nBFrames = 0;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        // "level" is mandatory once a profile is given.
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    // Keep optional H.263 features (PLUSPTYPE, GOB headers, ...) disabled.
    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// static
// Picks the minimum AVC level that can carry |width| x |height| at |rate| fps
// and |bitrate| bps for the given profile. The limits table mirrors the
// level constraints of the H.264 spec (Annex A); bitrate is first scaled to a
// baseline/main-profile kbps equivalent. Returns 0 when no level fits.
int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
        int width, int height, int rate, int bitrate,
        OMX_VIDEO_AVCPROFILETYPE profile) {
    // convert bitrate to main/baseline profile kbps equivalent
    switch (profile) {
        case OMX_VIDEO_AVCProfileHigh10:
            bitrate = divUp(bitrate, 3000); break;
        case OMX_VIDEO_AVCProfileHigh:
            bitrate = divUp(bitrate, 1250); break;
        default:
            bitrate = divUp(bitrate, 1000); break;
    }

    // convert size and rate to MBs
    width = divUp(width, 16);
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;
    int maxDimension = max(width, height);

    static const int limits[][5] = {
        /*    MBps     MB  dim  bitrate              level */
        {    1485,    99,  28,     64, OMX_VIDEO_AVCLevel1  },
        {    1485,    99,  28,    128, OMX_VIDEO_AVCLevel1b },
        {    3000,   396,  56,    192, OMX_VIDEO_AVCLevel11 },
        {    6000,   396,  56,    384, OMX_VIDEO_AVCLevel12 },
        {   11880,   396,  56,    768, OMX_VIDEO_AVCLevel13 },
        {   11880,   396,  56,   2000, OMX_VIDEO_AVCLevel2  },
        {   19800,   792,  79,   4000, OMX_VIDEO_AVCLevel21 },
        {   20250,  1620, 113,   4000, OMX_VIDEO_AVCLevel22 },
        {   40500,  1620, 113,  10000, OMX_VIDEO_AVCLevel3  },
        {  108000,  3600, 169,  14000, OMX_VIDEO_AVCLevel31 },
        {  216000,  5120, 202,  20000, OMX_VIDEO_AVCLevel32 },
        {  245760,  8192, 256,  20000, OMX_VIDEO_AVCLevel4  },
        {  245760,  8192, 256,  50000, OMX_VIDEO_AVCLevel41 },
        {  522240,  8704, 263,  50000, OMX_VIDEO_AVCLevel42 },
        {  589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5  },
        {  983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 },
        { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 },
    };

    // Table is ordered by increasing level; return the first one that fits.
    for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
3929 const int (&limit)[5] = limits[i]; 3930 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 3931 && bitrate <= limit[3]) { 3932 return limit[4]; 3933 } 3934 } 3935 return 0; 3936} 3937 3938status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 3939 int32_t bitrate, iFrameInterval; 3940 if (!msg->findInt32("bitrate", &bitrate) 3941 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3942 return INVALID_OPERATION; 3943 } 3944 3945 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3946 3947 float frameRate; 3948 if (!msg->findFloat("frame-rate", &frameRate)) { 3949 int32_t tmp; 3950 if (!msg->findInt32("frame-rate", &tmp)) { 3951 return INVALID_OPERATION; 3952 } 3953 frameRate = (float)tmp; 3954 } 3955 3956 status_t err = OK; 3957 int32_t intraRefreshMode = 0; 3958 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 3959 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 3960 if (err != OK) { 3961 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 3962 err, intraRefreshMode); 3963 return err; 3964 } 3965 } 3966 3967 OMX_VIDEO_PARAM_AVCTYPE h264type; 3968 InitOMXParams(&h264type); 3969 h264type.nPortIndex = kPortIndexOutput; 3970 3971 err = mOMX->getParameter( 3972 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 3973 3974 if (err != OK) { 3975 return err; 3976 } 3977 3978 h264type.nAllowedPictureTypes = 3979 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3980 3981 int32_t profile; 3982 if (msg->findInt32("profile", &profile)) { 3983 int32_t level; 3984 if (!msg->findInt32("level", &level)) { 3985 return INVALID_OPERATION; 3986 } 3987 3988 err = verifySupportForProfileAndLevel(profile, level); 3989 3990 if (err != OK) { 3991 return err; 3992 } 3993 3994 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 3995 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 3996 } else { 3997 // Use baseline profile for AVC recording if profile is not 
specified. 3998 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 3999 } 4000 4001 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4002 asString(h264type.eProfile), asString(h264type.eLevel)); 4003 4004 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4005 h264type.nSliceHeaderSpacing = 0; 4006 h264type.bUseHadamard = OMX_TRUE; 4007 h264type.nRefFrames = 1; 4008 h264type.nBFrames = 0; 4009 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4010 if (h264type.nPFrames == 0) { 4011 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4012 } 4013 h264type.nRefIdx10ActiveMinus1 = 0; 4014 h264type.nRefIdx11ActiveMinus1 = 0; 4015 h264type.bEntropyCodingCABAC = OMX_FALSE; 4016 h264type.bWeightedPPrediction = OMX_FALSE; 4017 h264type.bconstIpred = OMX_FALSE; 4018 h264type.bDirect8x8Inference = OMX_FALSE; 4019 h264type.bDirectSpatialTemporal = OMX_FALSE; 4020 h264type.nCabacInitIdc = 0; 4021 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4022 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4023 h264type.nSliceHeaderSpacing = 0; 4024 h264type.bUseHadamard = OMX_TRUE; 4025 h264type.nRefFrames = 2; 4026 h264type.nBFrames = 1; 4027 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4028 h264type.nAllowedPictureTypes = 4029 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4030 h264type.nRefIdx10ActiveMinus1 = 0; 4031 h264type.nRefIdx11ActiveMinus1 = 0; 4032 h264type.bEntropyCodingCABAC = OMX_TRUE; 4033 h264type.bWeightedPPrediction = OMX_TRUE; 4034 h264type.bconstIpred = OMX_TRUE; 4035 h264type.bDirect8x8Inference = OMX_TRUE; 4036 h264type.bDirectSpatialTemporal = OMX_TRUE; 4037 h264type.nCabacInitIdc = 1; 4038 } 4039 4040 if (h264type.nBFrames != 0) { 4041 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4042 } 4043 4044 h264type.bEnableUEP = OMX_FALSE; 4045 h264type.bEnableFMO = OMX_FALSE; 4046 h264type.bEnableASO = OMX_FALSE; 4047 h264type.bEnableRS = OMX_FALSE; 4048 
h264type.bFrameMBsOnly = OMX_TRUE; 4049 h264type.bMBAFF = OMX_FALSE; 4050 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4051 4052 err = mOMX->setParameter( 4053 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4054 4055 if (err != OK) { 4056 return err; 4057 } 4058 4059 return configureBitrate(bitrate, bitrateMode); 4060} 4061 4062status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4063 int32_t bitrate, iFrameInterval; 4064 if (!msg->findInt32("bitrate", &bitrate) 4065 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4066 return INVALID_OPERATION; 4067 } 4068 4069 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4070 4071 float frameRate; 4072 if (!msg->findFloat("frame-rate", &frameRate)) { 4073 int32_t tmp; 4074 if (!msg->findInt32("frame-rate", &tmp)) { 4075 return INVALID_OPERATION; 4076 } 4077 frameRate = (float)tmp; 4078 } 4079 4080 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4081 InitOMXParams(&hevcType); 4082 hevcType.nPortIndex = kPortIndexOutput; 4083 4084 status_t err = OK; 4085 err = mOMX->getParameter( 4086 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4087 if (err != OK) { 4088 return err; 4089 } 4090 4091 int32_t profile; 4092 if (msg->findInt32("profile", &profile)) { 4093 int32_t level; 4094 if (!msg->findInt32("level", &level)) { 4095 return INVALID_OPERATION; 4096 } 4097 4098 err = verifySupportForProfileAndLevel(profile, level); 4099 if (err != OK) { 4100 return err; 4101 } 4102 4103 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4104 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4105 } 4106 // TODO: finer control? 
    hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate);

    err = mOMX->setParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
    if (err != OK) {
        return err;
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Configures the output port for VP8/VP9 encoding: key-frame interval,
// WebRTC temporal-layering pattern ("ts-schema") and, for CBR, the
// quantizer range; finally applies the target bitrate.
status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    int32_t iFrameInterval = 0;
    size_t tsLayers = 0;
    OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern =
        OMX_VIDEO_VPXTemporalLayerPatternNone;
    // Cumulative per-layer bitrate percentages, indexed by [layerCount - 1].
    static const uint32_t kVp8LayerRateAlloction
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = {
        {100, 100, 100},  // 1 layer
        { 60, 100, 100},  // 2 layers {60%, 40%}
        { 40,  60, 100},  // 3 layers {40%, 20%, 40%}
    };
    if (!msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }
    msg->findInt32("i-frame-interval", &iFrameInterval);  // optional

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    AString tsSchema;
    if (msg->findString("ts-schema", &tsSchema)) {
        if (tsSchema == "webrtc.vp8.1-layer") {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            tsLayers = 1;
        } else if (tsSchema == "webrtc.vp8.2-layer") {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            tsLayers = 2;
        } else if (tsSchema == "webrtc.vp8.3-layer") {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            tsLayers = 3;
        } else {
            ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str());
        }
    }

    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
    InitOMXParams(&vp8type);
    vp8type.nPortIndex = kPortIndexOutput;
    status_t err =
mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
            &vp8type, sizeof(vp8type));

    // The extended VP8 parameters are optional; failure here is not fatal.
    if (err == OK) {
        if (iFrameInterval > 0) {
            vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate);
        }
        vp8type.eTemporalPattern = pattern;
        vp8type.nTemporalLayerCount = tsLayers;
        if (tsLayers > 0) {
            for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) {
                vp8type.nTemporalLayerBitrateRatio[i] =
                    kVp8LayerRateAlloction[tsLayers - 1][i];
            }
        }
        if (bitrateMode == OMX_Video_ControlRateConstant) {
            vp8type.nMinQuantizer = 2;
            vp8type.nMaxQuantizer = 63;
        }

        err = mOMX->setParameter(
                mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                &vp8type, sizeof(vp8type));
        if (err != OK) {
            ALOGW("Extended VP8 parameters set failed: %d", err);
        }
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Checks whether the component advertises support for |profile| at (at
// least) |level| by enumerating the supported profile/level pairs via
// OMX_IndexParamVideoProfileLevelQuerySupported.
status_t ACodec::verifySupportForProfileAndLevel(
        int32_t profile, int32_t level) {
    OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        params.nProfileIndex = index;
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
                sizeof(params));

        if (err != OK) {
            // Enumeration ended (or failed); report the component's error.
            return err;
        }

        int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
        int32_t supportedLevel = static_cast<int32_t>(params.eLevel);

        if (profile == supportedProfile && level <= supportedLevel) {
            return OK;
        }

        if (index == kMaxIndicesToCheck) {
            // give up after kMaxIndicesToCheck entries
            ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                    mComponentName.c_str(), index,
                    params.eProfile, params.eLevel);
        }
    }
    return ERROR_UNSUPPORTED;
}

status_t ACodec::configureBitrate(
        int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
    InitOMXParams(&bitrateType);
    bitrateType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));

    if (err != OK) {
        return err;
    }

    bitrateType.eControlRate = bitrateMode;
    bitrateType.nTargetBitrate = bitrate;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));
}

// Enables resync-marker based error resilience on the output port.
// HEC, data partitioning and RVLC are explicitly disabled.
status_t ACodec::setupErrorCorrectionParameters() {
    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
    InitOMXParams(&errorCorrectionType);
    errorCorrectionType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));

    if (err != OK) {
        return OK;  // Optional feature. Ignore this failure
    }

    errorCorrectionType.bEnableHEC = OMX_FALSE;
    errorCorrectionType.bEnableResync = OMX_TRUE;
    errorCorrectionType.nResynchMarkerSpacing = 256;
    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
    errorCorrectionType.bEnableRVLC = OMX_FALSE;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));
}

// Applies frame size (and, for the input port, compression format and
// frame rate) to the given port's OMX_PARAM_PORTDEFINITIONTYPE.
status_t ACodec::setVideoFormatOnPort(
        OMX_U32 portIndex,
        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
        float frameRate) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (portIndex == kPortIndexInput) {
        // XXX Need a (much) better heuristic to compute input buffer sizes.
        const size_t X = 64 * 1024;
        if (def.nBufferSize < X) {
            def.nBufferSize = X;
        }
    }

    if (def.eDomain != OMX_PortDomainVideo) {
        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
        return FAILED_TRANSACTION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    if (portIndex == kPortIndexInput) {
        video_def->eCompressionFormat = compressionFormat;
        video_def->eColorFormat = OMX_COLOR_FormatUnused;
        if (frameRate >= 0) {
            // xFramerate is expressed in Q16 fixed-point format.
            video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
        }
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    return err;
}

// Enables output-port native (graphic) buffer usage when a native window
// is set; otherwise ensures it is disabled (result deliberately ignored).
status_t ACodec::initNativeWindow() {
    if (mNativeWindow != NULL) {
        return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
    }

    mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
    return OK;
}

// Counts buffers on |portIndex| currently held by the OMX component.
size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            ++n;
        }
    }

    return n;
}

// Counts output buffers currently held by the native window.
size_t ACodec::countBuffersOwnedByNativeWindow() const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ++n;
        }
    }

    return n;
}

// Dequeues buffers back from the native window until only the minimum
// number of undequeued buffers remains with it.
void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
    if (mNativeWindow == NULL) {
        return;
    }

    while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
            && dequeueBufferFromNativeWindow() != NULL) {
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}

// Returns true if every buffer on |portIndex| is held either by us or by
// the native window (i.e. none is still with the OMX component).
bool ACodec::allYourBuffersAreBelongToUs(
        OMX_U32 portIndex) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mStatus != BufferInfo::OWNED_BY_US
                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("[%s] Buffer %u on port %u still has status %d",
                    mComponentName.c_str(),
                    info->mBufferID, portIndex, info->mStatus);
            return false;
        }
    }

    return true;
}

bool ACodec::allYourBuffersAreBelongToUs() {
    return allYourBuffersAreBelongToUs(kPortIndexInput)
        && allYourBuffersAreBelongToUs(kPortIndexOutput);
}

void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}

// Re-dispatches all deferred messages in order. The queue is drained into
// a local copy first so handlers may defer messages again without looping.
void ACodec::processDeferredMessages() {
    List<sp<AMessage> > queue = mDeferredQueue;
    mDeferredQueue.clear();

    List<sp<AMessage> >::iterator it = queue.begin();
    while (it != queue.end()) {
        onMessageReceived(*it++);
    }
}

// Queries the current format of |portIndex| from the component and fills
// |notify| with MediaFormat-style keys ("mime", width/height/crop for
// video, channel-count/sample-rate for audio, etc.). Returns
// BAD_VALUE/BAD_TYPE on unexpected or unsupported port configurations.
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
    const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (def.eDir != (portIndex == kPortIndexOutput ?
OMX_DirOutput : OMX_DirInput)) {
        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
        return BAD_VALUE;
    }

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // Raw video: only valid on decoder output / encoder input.
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    if (mNativeWindow == NULL) {
                        // Describe the plane layout for client-visible buffers.
                        DescribeColorFormat2Params describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (DescribeColorFormat(mOMX, mNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        // fall back to the full frame if no crop is reported
                        if (mOMX->getConfig(
                                    mNode,
                                    (portIndex == kPortIndexOutput ?
                                            OMX_IndexConfigCommonOutputCrop :
                                            OMX_IndexConfigCommonInputCrop),
                                    &rect, sizeof(rect)) != OK) {
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        if (rect.nLeft < 0 ||
                            rect.nTop < 0 ||
                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
                                  rect.nLeft, rect.nTop,
                                  rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
                                  videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        // "crop" uses inclusive right/bottom coordinates
                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                        (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
                    } else {
                        (void)getInputColorAspectsForVideoEncoder(notify);
                        if (mConfigFormat->contains("hdr-static-info")) {
                            (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
                        }
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        // report the temporal layering schema to the client
                        AString tsSchema = "none";
                        if (vp8type.eTemporalPattern
                                == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
                            switch (vp8type.nTemporalLayerCount) {
                                case 1:
                                {
                                    tsSchema = "webrtc.vp8.1-layer";
                                    break;
                                }
                                case 2:
                                {
                                    tsSchema = "webrtc.vp8.2-layer";
                                    break;
                                }
                                case 3:
                                {
                                    tsSchema = "webrtc.vp8.3-layer";
                                    break;
                                }
                                default:
                                {
                                    break;
                                }
                            }
                        }
                        notify->setString("ts-schema", tsSchema);
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
                        // should be CodingUnused
                        ALOGE("Raw port video compression format is %s(%d)",
                                asString(videoDef->eCompressionFormat),
                                videoDef->eCompressionFormat);
                        return BAD_VALUE;
                    }
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                                videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    uint32_t intraRefreshPeriod = 0;
                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
                            && intraRefreshPeriod > 0) {
                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // only interleaved (or mono) linear PCM is supported
                    if (params.nChannels <= 0
                            || (params.nChannels != 1 && !params.bInterleaved)
                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
                        ALOGE("unsupported PCM port: %u channels%s, %u-bit",
                                params.nChannels,
                                params.bInterleaved ? " interleaved" : "",
                                params.nBitPerSample);
                        return FAILED_TRANSACTION;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);

                    AudioEncoding encoding = kAudioEncodingPcm16bit;
                    if (params.eNumData == OMX_NumericalDataUnsigned
                            && params.nBitPerSample == 8u) {
                        encoding = kAudioEncodingPcm8bit;
                    } else if (params.eNumData == OMX_NumericalDataFloat
                            && params.nBitPerSample == 32u) {
                        encoding = kAudioEncodingPcmFloat;
                    } else if (params.nBitPerSample != 16u
                            || params.eNumData != OMX_NumericalDataSigned) {
                        ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ",
                                asString(params.eNumData), params.eNumData,
                                asString(params.ePCMMode), params.ePCMMode);
                        return FAILED_TRANSACTION;
                    }
                    notify->setInt32("pcm-encoding", encoding);

                    if (mChannelMaskPresent) {
                        notify->setInt32("channel-mask", mChannelMask);
                    }
                    break;
                }

                case OMX_AUDIO_CodingAAC:
                {
                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAMR:
                {
                    OMX_AUDIO_PARAM_AMRTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // AMR is always mono; the band mode picks NB vs WB
                    notify->setInt32("channel-count", 1);
                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
                        notify->setInt32("sample-rate", 16000);
                    } else {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
                        notify->setInt32("sample-rate", 8000);
                    }
                    break;
                }

                case OMX_AUDIO_CodingFLAC:
                {
                    OMX_AUDIO_PARAM_FLACTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingMP3:
                {
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingVORBIS:
                {
                    OMX_AUDIO_PARAM_VORBISTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidEAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidOPUS:
                {
                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingG711:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    const char *mime = NULL;
                    if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
                    } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
                    } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
                        mime = MEDIA_MIMETYPE_AUDIO_RAW;
                    }
                    notify->setString("mime", mime);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate",
params.nSamplingRate); 4831 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 4832 break; 4833 } 4834 4835 case OMX_AUDIO_CodingGSMFR: 4836 { 4837 OMX_AUDIO_PARAM_PCMMODETYPE params; 4838 InitOMXParams(¶ms); 4839 params.nPortIndex = portIndex; 4840 4841 err = mOMX->getParameter( 4842 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4843 if (err != OK) { 4844 return err; 4845 } 4846 4847 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 4848 notify->setInt32("channel-count", params.nChannels); 4849 notify->setInt32("sample-rate", params.nSamplingRate); 4850 break; 4851 } 4852 4853 default: 4854 ALOGE("Unsupported audio coding: %s(%d)\n", 4855 asString(audioDef->eEncoding), audioDef->eEncoding); 4856 return BAD_TYPE; 4857 } 4858 break; 4859 } 4860 4861 default: 4862 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 4863 return BAD_TYPE; 4864 } 4865 4866 return OK; 4867} 4868 4869void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 4870 // aspects are normally communicated in ColorAspects 4871 int32_t range, standard, transfer; 4872 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 4873 4874 // if some aspects are unspecified, use dataspace fields 4875 if (range != 0) { 4876 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 4877 } 4878 if (standard != 0) { 4879 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 4880 } 4881 if (transfer != 0) { 4882 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 4883 } 4884 4885 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 4886 if (range != 0) { 4887 mOutputFormat->setInt32("color-range", range); 4888 } 4889 if (standard != 0) { 4890 mOutputFormat->setInt32("color-standard", standard); 4891 } 4892 if (transfer != 0) { 4893 mOutputFormat->setInt32("color-transfer", transfer); 4894 } 4895 4896 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 4897 "(R:%d(%s), S:%d(%s), T:%d(%s))", 4898 dataSpace, 4899 aspects.mRange, asString(aspects.mRange), 4900 aspects.mPrimaries, asString(aspects.mPrimaries), 4901 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 4902 aspects.mTransfer, asString(aspects.mTransfer), 4903 range, asString((ColorRange)range), 4904 standard, asString((ColorStandard)standard), 4905 transfer, asString((ColorTransfer)transfer)); 4906} 4907 4908void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 4909 // store new output format, at the same time mark that this is no longer the first frame 4910 mOutputFormat = mBaseOutputFormat->dup(); 4911 4912 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 4913 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 4914 return; 4915 } 4916 4917 if (expectedFormat != NULL) { 4918 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 4919 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 4920 if (changes->countEntries() != 0 || to->countEntries() != 0) { 4921 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 4922 mComponentName.c_str(), 4923 changes->debugString(4).c_str(), to->debugString(4).c_str()); 4924 } 4925 } 4926 4927 if (!mIsVideo && !mIsEncoder) { 4928 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 4929 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 4930 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 4931 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 4932 4933 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 4934 if (mConverter[kPortIndexOutput] != NULL) { 4935 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 4936 } 4937 } 4938 4939 if (mTunneled) { 4940 sendFormatChange(); 4941 } 4942} 4943 4944void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
        // notify renderer of the crop change and dataspace change
        // NOTE: native window uses extended right-bottom coordinate
        int32_t left, top, right, bottom;
        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
            notify->setRect("crop", left, top, right + 1, bottom + 1);
        }

        int32_t dataSpace;
        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
            notify->setInt32("dataspace", dataSpace);
        }
    }
}

// Posts a kWhatOutputFormatChanged notification carrying the current output
// format; for raw audio, first rescales encoder delay/padding to the new
// sample rate and (re)creates the skip/cut buffer.
void ACodec::sendFormatChange() {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) {
        int32_t channelCount, sampleRate;
        CHECK(mOutputFormat->findInt32("channel-count", &channelCount));
        CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate));
        if (mSampleRate != 0 && sampleRate != 0) {
            // delay/padding are in frames; scale them to the new rate
            mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate;
            mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate;
            mSampleRate = sampleRate;
        }
        if (mSkipCutBuffer != NULL) {
            size_t prevbufsize = mSkipCutBuffer->size();
            if (prevbufsize != 0) {
                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
            }
        }
        mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatOutputFormatChanged);
    notify->setMessage("format", mOutputFormat);
    notify->post();

    // mLastOutputFormat is not used when tunneled; doing this just to stay consistent
    mLastOutputFormat = mOutputFormat;
}

// Notifies the client of a fatal error, translating the OMX error into a
// status_t when no explicit internal error code was provided.
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatError);
    ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);

    if (internalError == UNKNOWN_ERROR) { // find better error code
        const status_t omxStatus = statusFromOMXError(error);
        if (omxStatus != 0) {
            internalError = omxStatus;
        } else {
            ALOGW("Invalid OMX error %#x", error);
        }
    }

    // remember that we hit a fatal error; further operation is unreliable
    mFatalError = true;

    notify->setInt32("err", internalError);
    notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
    notify->post();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::PortDescription::PortDescription() {
}

// Requests an IDR (sync) frame from a running encoder.
status_t ACodec::requestIDRFrame() {
    if (!mIsEncoder) {
        return ERROR_UNSUPPORTED;
    }

    OMX_CONFIG_INTRAREFRESHVOPTYPE params;
    InitOMXParams(&params);

    params.nPortIndex = kPortIndexOutput;
    params.IntraRefreshVOP = OMX_TRUE;

    return mOMX->setConfig(
            mNode,
            OMX_IndexConfigVideoIntraVOPRefresh,
            &params,
            sizeof(params));
}

// Records one buffer (id, data, optional native handle and backing memory)
// in this port description; the four vectors stay index-aligned.
void ACodec::PortDescription::addBuffer(
        IOMX::buffer_id id, const sp<ABuffer> &buffer,
        const sp<NativeHandle> &handle, const sp<RefBase> &memRef) {
    mBufferIDs.push_back(id);
    mBuffers.push_back(buffer);
    mHandles.push_back(handle);
    mMemRefs.push_back(memRef);
}

size_t ACodec::PortDescription::countBuffers() {
    return mBufferIDs.size();
}

IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
    return mBufferIDs.itemAt(index);
}

sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
    return mBuffers.itemAt(index);
}

sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const {
    return mHandles.itemAt(index);
}

sp<RefBase>
ACodec::PortDescription::memRefAt(size_t index) const { 5064 return mMemRefs.itemAt(index); 5065} 5066 5067//////////////////////////////////////////////////////////////////////////////// 5068 5069ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5070 : AState(parentState), 5071 mCodec(codec) { 5072} 5073 5074ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5075 OMX_U32 /* portIndex */) { 5076 return KEEP_BUFFERS; 5077} 5078 5079bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5080 switch (msg->what()) { 5081 case kWhatInputBufferFilled: 5082 { 5083 onInputBufferFilled(msg); 5084 break; 5085 } 5086 5087 case kWhatOutputBufferDrained: 5088 { 5089 onOutputBufferDrained(msg); 5090 break; 5091 } 5092 5093 case ACodec::kWhatOMXMessageList: 5094 { 5095 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5096 } 5097 5098 case ACodec::kWhatOMXMessageItem: 5099 { 5100 // no need to check as we already did it for kWhatOMXMessageList 5101 return onOMXMessage(msg); 5102 } 5103 5104 case ACodec::kWhatOMXMessage: 5105 { 5106 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5107 } 5108 5109 case ACodec::kWhatSetSurface: 5110 { 5111 sp<AReplyToken> replyID; 5112 CHECK(msg->senderAwaitsResponse(&replyID)); 5113 5114 sp<RefBase> obj; 5115 CHECK(msg->findObject("surface", &obj)); 5116 5117 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5118 5119 sp<AMessage> response = new AMessage; 5120 response->setInt32("err", err); 5121 response->postReply(replyID); 5122 break; 5123 } 5124 5125 case ACodec::kWhatCreateInputSurface: 5126 case ACodec::kWhatSetInputSurface: 5127 case ACodec::kWhatSignalEndOfInputStream: 5128 { 5129 // This may result in an app illegal state exception. 
5130 ALOGE("Message 0x%x was not handled", msg->what()); 5131 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5132 return true; 5133 } 5134 5135 case ACodec::kWhatOMXDied: 5136 { 5137 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5138 ALOGE("OMX/mediaserver died, signalling error!"); 5139 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5140 break; 5141 } 5142 5143 case ACodec::kWhatReleaseCodecInstance: 5144 { 5145 ALOGI("[%s] forcing the release of codec", 5146 mCodec->mComponentName.c_str()); 5147 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5148 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5149 mCodec->mComponentName.c_str(), err); 5150 sp<AMessage> notify = mCodec->mNotify->dup(); 5151 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5152 notify->post(); 5153 break; 5154 } 5155 5156 default: 5157 return false; 5158 } 5159 5160 return true; 5161} 5162 5163bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5164 // there is a possibility that this is an outstanding message for a 5165 // codec that we have already destroyed 5166 if (mCodec->mNode == 0) { 5167 ALOGI("ignoring message as already freed component: %s", 5168 msg->debugString().c_str()); 5169 return false; 5170 } 5171 5172 IOMX::node_id nodeID; 5173 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5174 if (nodeID != mCodec->mNode) { 5175 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5176 return false; 5177 } 5178 return true; 5179} 5180 5181bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5182 sp<RefBase> obj; 5183 CHECK(msg->findObject("messages", &obj)); 5184 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5185 5186 bool receivedRenderedEvents = false; 5187 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5188 it != msgList->getList().cend(); ++it) { 5189 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 
5190 mCodec->handleMessage(*it); 5191 int32_t type; 5192 CHECK((*it)->findInt32("type", &type)); 5193 if (type == omx_message::FRAME_RENDERED) { 5194 receivedRenderedEvents = true; 5195 } 5196 } 5197 5198 if (receivedRenderedEvents) { 5199 // NOTE: all buffers are rendered in this case 5200 mCodec->notifyOfRenderedFrames(); 5201 } 5202 return true; 5203} 5204 5205bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5206 int32_t type; 5207 CHECK(msg->findInt32("type", &type)); 5208 5209 switch (type) { 5210 case omx_message::EVENT: 5211 { 5212 int32_t event, data1, data2; 5213 CHECK(msg->findInt32("event", &event)); 5214 CHECK(msg->findInt32("data1", &data1)); 5215 CHECK(msg->findInt32("data2", &data2)); 5216 5217 if (event == OMX_EventCmdComplete 5218 && data1 == OMX_CommandFlush 5219 && data2 == (int32_t)OMX_ALL) { 5220 // Use of this notification is not consistent across 5221 // implementations. We'll drop this notification and rely 5222 // on flush-complete notifications on the individual port 5223 // indices instead. 
5224 5225 return true; 5226 } 5227 5228 return onOMXEvent( 5229 static_cast<OMX_EVENTTYPE>(event), 5230 static_cast<OMX_U32>(data1), 5231 static_cast<OMX_U32>(data2)); 5232 } 5233 5234 case omx_message::EMPTY_BUFFER_DONE: 5235 { 5236 IOMX::buffer_id bufferID; 5237 int32_t fenceFd; 5238 5239 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5240 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5241 5242 return onOMXEmptyBufferDone(bufferID, fenceFd); 5243 } 5244 5245 case omx_message::FILL_BUFFER_DONE: 5246 { 5247 IOMX::buffer_id bufferID; 5248 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5249 5250 int32_t rangeOffset, rangeLength, flags, fenceFd; 5251 int64_t timeUs; 5252 5253 CHECK(msg->findInt32("range_offset", &rangeOffset)); 5254 CHECK(msg->findInt32("range_length", &rangeLength)); 5255 CHECK(msg->findInt32("flags", &flags)); 5256 CHECK(msg->findInt64("timestamp", &timeUs)); 5257 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5258 5259 return onOMXFillBufferDone( 5260 bufferID, 5261 (size_t)rangeOffset, (size_t)rangeLength, 5262 (OMX_U32)flags, 5263 timeUs, 5264 fenceFd); 5265 } 5266 5267 case omx_message::FRAME_RENDERED: 5268 { 5269 int64_t mediaTimeUs, systemNano; 5270 5271 CHECK(msg->findInt64("media_time_us", &mediaTimeUs)); 5272 CHECK(msg->findInt64("system_nano", &systemNano)); 5273 5274 return onOMXFrameRendered( 5275 mediaTimeUs, systemNano); 5276 } 5277 5278 default: 5279 ALOGE("Unexpected message type: %d", type); 5280 return false; 5281 } 5282} 5283 5284bool ACodec::BaseState::onOMXFrameRendered( 5285 int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) { 5286 // ignore outside of Executing and PortSettingsChanged states 5287 return true; 5288} 5289 5290bool ACodec::BaseState::onOMXEvent( 5291 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5292 if (event == OMX_EventDataSpaceChanged) { 5293 ColorAspects aspects; 5294 aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF); 5295 aspects.mPrimaries = 
(ColorAspects::Primaries)((data2 >> 16) & 0xFF); 5296 aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF); 5297 aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF); 5298 5299 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects); 5300 return true; 5301 } 5302 5303 if (event != OMX_EventError) { 5304 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)", 5305 mCodec->mComponentName.c_str(), event, data1, data2); 5306 5307 return false; 5308 } 5309 5310 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1); 5311 5312 // verify OMX component sends back an error we expect. 5313 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 5314 if (!isOMXError(omxError)) { 5315 ALOGW("Invalid OMX error %#x", omxError); 5316 omxError = OMX_ErrorUndefined; 5317 } 5318 mCodec->signalError(omxError); 5319 5320 return true; 5321} 5322 5323bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { 5324 ALOGV("[%s] onOMXEmptyBufferDone %u", 5325 mCodec->mComponentName.c_str(), bufferID); 5326 5327 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5328 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5329 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5330 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5331 mCodec->dumpBuffers(kPortIndexInput); 5332 if (fenceFd >= 0) { 5333 ::close(fenceFd); 5334 } 5335 return false; 5336 } 5337 info->mStatus = BufferInfo::OWNED_BY_US; 5338 5339 // input buffers cannot take fences, so wait for any fence now 5340 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); 5341 fenceFd = -1; 5342 5343 // still save fence for completeness 5344 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); 5345 5346 // We're in "store-metadata-in-buffers" mode, the underlying 5347 // OMX component had access to data that's implicitly refcounted 5348 // by this "MediaBuffer" object. 
Now that the OMX component has 5349 // told us that it's done with the input buffer, we can decrement 5350 // the mediaBuffer's reference count. 5351 info->mData->setMediaBufferBase(NULL); 5352 5353 PortMode mode = getPortMode(kPortIndexInput); 5354 5355 switch (mode) { 5356 case KEEP_BUFFERS: 5357 break; 5358 5359 case RESUBMIT_BUFFERS: 5360 postFillThisBuffer(info); 5361 break; 5362 5363 case FREE_BUFFERS: 5364 default: 5365 ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers"); 5366 return false; 5367 } 5368 5369 return true; 5370} 5371 5372void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 5373 if (mCodec->mPortEOS[kPortIndexInput]) { 5374 return; 5375 } 5376 5377 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 5378 5379 sp<AMessage> notify = mCodec->mNotify->dup(); 5380 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 5381 notify->setInt32("buffer-id", info->mBufferID); 5382 5383 info->mData->meta()->clear(); 5384 notify->setBuffer("buffer", info->mData); 5385 5386 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); 5387 reply->setInt32("buffer-id", info->mBufferID); 5388 5389 notify->setMessage("reply", reply); 5390 5391 notify->post(); 5392 5393 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 5394} 5395 5396void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 5397 IOMX::buffer_id bufferID; 5398 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5399 sp<ABuffer> buffer; 5400 int32_t err = OK; 5401 bool eos = false; 5402 PortMode mode = getPortMode(kPortIndexInput); 5403 5404 if (!msg->findBuffer("buffer", &buffer)) { 5405 /* these are unfilled buffers returned by client */ 5406 CHECK(msg->findInt32("err", &err)); 5407 5408 if (err == OK) { 5409 /* buffers with no errors are returned on MediaCodec.flush */ 5410 mode = KEEP_BUFFERS; 5411 } else { 5412 ALOGV("[%s] saw error %d instead of an input buffer", 5413 mCodec->mComponentName.c_str(), err); 5414 eos = true; 5415 } 5416 5417 
buffer.clear(); 5418 } 5419 5420 int32_t tmp; 5421 if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { 5422 eos = true; 5423 err = ERROR_END_OF_STREAM; 5424 } 5425 5426 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5427 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5428 if (status != BufferInfo::OWNED_BY_UPSTREAM) { 5429 ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID); 5430 mCodec->dumpBuffers(kPortIndexInput); 5431 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5432 return; 5433 } 5434 5435 info->mStatus = BufferInfo::OWNED_BY_US; 5436 5437 switch (mode) { 5438 case KEEP_BUFFERS: 5439 { 5440 if (eos) { 5441 if (!mCodec->mPortEOS[kPortIndexInput]) { 5442 mCodec->mPortEOS[kPortIndexInput] = true; 5443 mCodec->mInputEOSResult = err; 5444 } 5445 } 5446 break; 5447 } 5448 5449 case RESUBMIT_BUFFERS: 5450 { 5451 if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) { 5452 // Do not send empty input buffer w/o EOS to the component. 5453 if (buffer->size() == 0 && !eos) { 5454 postFillThisBuffer(info); 5455 break; 5456 } 5457 5458 int64_t timeUs; 5459 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 5460 5461 OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; 5462 5463 MetadataBufferType metaType = mCodec->mInputMetadataType; 5464 int32_t isCSD = 0; 5465 if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) { 5466 if (mCodec->mIsLegacyVP9Decoder) { 5467 ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data", 5468 mCodec->mComponentName.c_str(), bufferID); 5469 postFillThisBuffer(info); 5470 break; 5471 } 5472 flags |= OMX_BUFFERFLAG_CODECCONFIG; 5473 metaType = kMetadataBufferTypeInvalid; 5474 } 5475 5476 if (eos) { 5477 flags |= OMX_BUFFERFLAG_EOS; 5478 } 5479 5480 if (buffer != info->mCodecData) { 5481 ALOGV("[%s] Needs to copy input data for buffer %u. 
(%p != %p)", 5482 mCodec->mComponentName.c_str(), 5483 bufferID, 5484 buffer.get(), info->mCodecData.get()); 5485 5486 sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput]; 5487 if (converter == NULL || isCSD) { 5488 converter = getCopyConverter(); 5489 } 5490 status_t err = converter->convert(buffer, info->mCodecData); 5491 if (err != OK) { 5492 mCodec->signalError(OMX_ErrorUndefined, err); 5493 return; 5494 } 5495 } 5496 5497 if (flags & OMX_BUFFERFLAG_CODECCONFIG) { 5498 ALOGV("[%s] calling emptyBuffer %u w/ codec specific data", 5499 mCodec->mComponentName.c_str(), bufferID); 5500 } else if (flags & OMX_BUFFERFLAG_EOS) { 5501 ALOGV("[%s] calling emptyBuffer %u w/ EOS", 5502 mCodec->mComponentName.c_str(), bufferID); 5503 } else { 5504#if TRACK_BUFFER_TIMING 5505 ALOGI("[%s] calling emptyBuffer %u w/ time %lld us", 5506 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5507#else 5508 ALOGV("[%s] calling emptyBuffer %u w/ time %lld us", 5509 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5510#endif 5511 } 5512 5513#if TRACK_BUFFER_TIMING 5514 ACodec::BufferStats stats; 5515 stats.mEmptyBufferTimeUs = ALooper::GetNowUs(); 5516 stats.mFillBufferDoneTimeUs = -1ll; 5517 mCodec->mBufferStats.add(timeUs, stats); 5518#endif 5519 5520 if (mCodec->storingMetadataInDecodedBuffers()) { 5521 // try to submit an output buffer for each input buffer 5522 PortMode outputMode = getPortMode(kPortIndexOutput); 5523 5524 ALOGV("MetadataBuffersToSubmit=%u portMode=%s", 5525 mCodec->mMetadataBuffersToSubmit, 5526 (outputMode == FREE_BUFFERS ? "FREE" : 5527 outputMode == KEEP_BUFFERS ? 
"KEEP" : "RESUBMIT")); 5528 if (outputMode == RESUBMIT_BUFFERS) { 5529 mCodec->submitOutputMetadataBuffer(); 5530 } 5531 } 5532 info->checkReadFence("onInputBufferFilled"); 5533 5534 status_t err2 = OK; 5535 switch (metaType) { 5536 case kMetadataBufferTypeInvalid: 5537 break; 5538#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 5539 case kMetadataBufferTypeNativeHandleSource: 5540 if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) { 5541 VideoNativeHandleMetadata *vnhmd = 5542 (VideoNativeHandleMetadata*)info->mCodecData->base(); 5543 err2 = mCodec->mOMX->updateNativeHandleInMeta( 5544 mCodec->mNode, kPortIndexInput, 5545 NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */), 5546 bufferID); 5547 } 5548 break; 5549 case kMetadataBufferTypeANWBuffer: 5550 if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) { 5551 VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base(); 5552 err2 = mCodec->mOMX->updateGraphicBufferInMeta( 5553 mCodec->mNode, kPortIndexInput, 5554 new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */), 5555 bufferID); 5556 } 5557 break; 5558#endif 5559 default: 5560 ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode", 5561 asString(metaType), info->mCodecData->size(), 5562 sizeof(buffer_handle_t) * 8); 5563 err2 = ERROR_UNSUPPORTED; 5564 break; 5565 } 5566 5567 if (err2 == OK) { 5568 err2 = mCodec->mOMX->emptyBuffer( 5569 mCodec->mNode, 5570 bufferID, 5571 0, 5572 info->mCodecData->size(), 5573 flags, 5574 timeUs, 5575 info->mFenceFd); 5576 } 5577 info->mFenceFd = -1; 5578 if (err2 != OK) { 5579 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5580 return; 5581 } 5582 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5583 5584 if (!eos && err == OK) { 5585 getMoreInputDataIfPossible(); 5586 } else { 5587 ALOGV("[%s] Signalled EOS (%d) on the input port", 5588 mCodec->mComponentName.c_str(), err); 5589 5590 mCodec->mPortEOS[kPortIndexInput] = true; 5591 
mCodec->mInputEOSResult = err; 5592 } 5593 } else if (!mCodec->mPortEOS[kPortIndexInput]) { 5594 if (err != OK && err != ERROR_END_OF_STREAM) { 5595 ALOGV("[%s] Signalling EOS on the input port due to error %d", 5596 mCodec->mComponentName.c_str(), err); 5597 } else { 5598 ALOGV("[%s] Signalling EOS on the input port", 5599 mCodec->mComponentName.c_str()); 5600 } 5601 5602 ALOGV("[%s] calling emptyBuffer %u signalling EOS", 5603 mCodec->mComponentName.c_str(), bufferID); 5604 5605 info->checkReadFence("onInputBufferFilled"); 5606 status_t err2 = mCodec->mOMX->emptyBuffer( 5607 mCodec->mNode, 5608 bufferID, 5609 0, 5610 0, 5611 OMX_BUFFERFLAG_EOS, 5612 0, 5613 info->mFenceFd); 5614 info->mFenceFd = -1; 5615 if (err2 != OK) { 5616 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5617 return; 5618 } 5619 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5620 5621 mCodec->mPortEOS[kPortIndexInput] = true; 5622 mCodec->mInputEOSResult = err; 5623 } 5624 break; 5625 } 5626 5627 case FREE_BUFFERS: 5628 break; 5629 5630 default: 5631 ALOGE("invalid port mode: %d", mode); 5632 break; 5633 } 5634} 5635 5636void ACodec::BaseState::getMoreInputDataIfPossible() { 5637 if (mCodec->mPortEOS[kPortIndexInput]) { 5638 return; 5639 } 5640 5641 BufferInfo *eligible = NULL; 5642 5643 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 5644 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 5645 5646#if 0 5647 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { 5648 // There's already a "read" pending. 
5649 return; 5650 } 5651#endif 5652 5653 if (info->mStatus == BufferInfo::OWNED_BY_US) { 5654 eligible = info; 5655 } 5656 } 5657 5658 if (eligible == NULL) { 5659 return; 5660 } 5661 5662 postFillThisBuffer(eligible); 5663} 5664 5665bool ACodec::BaseState::onOMXFillBufferDone( 5666 IOMX::buffer_id bufferID, 5667 size_t rangeOffset, size_t rangeLength, 5668 OMX_U32 flags, 5669 int64_t timeUs, 5670 int fenceFd) { 5671 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x", 5672 mCodec->mComponentName.c_str(), bufferID, timeUs, flags); 5673 5674 ssize_t index; 5675 status_t err= OK; 5676 5677#if TRACK_BUFFER_TIMING 5678 index = mCodec->mBufferStats.indexOfKey(timeUs); 5679 if (index >= 0) { 5680 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index); 5681 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs(); 5682 5683 ALOGI("frame PTS %lld: %lld", 5684 timeUs, 5685 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs); 5686 5687 mCodec->mBufferStats.removeItemsAt(index); 5688 stats = NULL; 5689 } 5690#endif 5691 5692 BufferInfo *info = 5693 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5694 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5695 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5696 ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5697 mCodec->dumpBuffers(kPortIndexOutput); 5698 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5699 if (fenceFd >= 0) { 5700 ::close(fenceFd); 5701 } 5702 return true; 5703 } 5704 5705 info->mDequeuedAt = ++mCodec->mDequeueCounter; 5706 info->mStatus = BufferInfo::OWNED_BY_US; 5707 5708 if (info->mRenderInfo != NULL) { 5709 // The fence for an emptied buffer must have signaled, but there still could be queued 5710 // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these, 5711 // as we will soon requeue this buffer to the surface. 
While in theory we could still keep 5712 // track of buffers that are requeued to the surface, it is better to add support to the 5713 // buffer-queue to notify us of released buffers and their fences (in the future). 5714 mCodec->notifyOfRenderedFrames(true /* dropIncomplete */); 5715 } 5716 5717 // byte buffers cannot take fences, so wait for any fence now 5718 if (mCodec->mNativeWindow == NULL) { 5719 (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone"); 5720 fenceFd = -1; 5721 } 5722 info->setReadFence(fenceFd, "onOMXFillBufferDone"); 5723 5724 PortMode mode = getPortMode(kPortIndexOutput); 5725 5726 switch (mode) { 5727 case KEEP_BUFFERS: 5728 break; 5729 5730 case RESUBMIT_BUFFERS: 5731 { 5732 if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS) 5733 || mCodec->mPortEOS[kPortIndexOutput])) { 5734 ALOGV("[%s] calling fillBuffer %u", 5735 mCodec->mComponentName.c_str(), info->mBufferID); 5736 5737 err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 5738 info->mFenceFd = -1; 5739 if (err != OK) { 5740 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5741 return true; 5742 } 5743 5744 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5745 break; 5746 } 5747 5748 sp<AMessage> reply = 5749 new AMessage(kWhatOutputBufferDrained, mCodec); 5750 5751 if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) { 5752 // pretend that output format has changed on the first frame (we used to do this) 5753 if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) { 5754 mCodec->onOutputFormatChanged(mCodec->mOutputFormat); 5755 } 5756 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 5757 mCodec->sendFormatChange(); 5758 } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) { 5759 // If potentially rendering onto a surface, always save key format data (crop & 5760 // data space) so that we can set it if and once the buffer is rendered. 
5761 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 5762 } 5763 5764 if (mCodec->usingMetadataOnEncoderOutput()) { 5765 native_handle_t *handle = NULL; 5766 VideoNativeHandleMetadata &nativeMeta = 5767 *(VideoNativeHandleMetadata *)info->mData->data(); 5768 if (info->mData->size() >= sizeof(nativeMeta) 5769 && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) { 5770#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 5771 // handle is only valid on 32-bit/mediaserver process 5772 handle = NULL; 5773#else 5774 handle = (native_handle_t *)nativeMeta.pHandle; 5775#endif 5776 } 5777 info->mData->meta()->setPointer("handle", handle); 5778 info->mData->meta()->setInt32("rangeOffset", rangeOffset); 5779 info->mData->meta()->setInt32("rangeLength", rangeLength); 5780 } else if (info->mData == info->mCodecData) { 5781 info->mData->setRange(rangeOffset, rangeLength); 5782 } else { 5783 info->mCodecData->setRange(rangeOffset, rangeLength); 5784 // in this case we know that mConverter is not null 5785 status_t err = mCodec->mConverter[kPortIndexOutput]->convert( 5786 info->mCodecData, info->mData); 5787 if (err != OK) { 5788 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5789 return true; 5790 } 5791 } 5792#if 0 5793 if (mCodec->mNativeWindow == NULL) { 5794 if (IsIDR(info->mData)) { 5795 ALOGI("IDR frame"); 5796 } 5797 } 5798#endif 5799 5800 if (mCodec->mSkipCutBuffer != NULL) { 5801 mCodec->mSkipCutBuffer->submit(info->mData); 5802 } 5803 info->mData->meta()->setInt64("timeUs", timeUs); 5804 5805 sp<AMessage> notify = mCodec->mNotify->dup(); 5806 notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); 5807 notify->setInt32("buffer-id", info->mBufferID); 5808 notify->setBuffer("buffer", info->mData); 5809 notify->setInt32("flags", flags); 5810 5811 reply->setInt32("buffer-id", info->mBufferID); 5812 5813 notify->setMessage("reply", reply); 5814 5815 notify->post(); 5816 5817 info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM; 
5818 5819 if (flags & OMX_BUFFERFLAG_EOS) { 5820 ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str()); 5821 5822 sp<AMessage> notify = mCodec->mNotify->dup(); 5823 notify->setInt32("what", CodecBase::kWhatEOS); 5824 notify->setInt32("err", mCodec->mInputEOSResult); 5825 notify->post(); 5826 5827 mCodec->mPortEOS[kPortIndexOutput] = true; 5828 } 5829 break; 5830 } 5831 5832 case FREE_BUFFERS: 5833 err = mCodec->freeBuffer(kPortIndexOutput, index); 5834 if (err != OK) { 5835 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5836 return true; 5837 } 5838 break; 5839 5840 default: 5841 ALOGE("Invalid port mode: %d", mode); 5842 return false; 5843 } 5844 5845 return true; 5846} 5847 5848void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { 5849 IOMX::buffer_id bufferID; 5850 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5851 ssize_t index; 5852 BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5853 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5854 if (status != BufferInfo::OWNED_BY_DOWNSTREAM) { 5855 ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5856 mCodec->dumpBuffers(kPortIndexOutput); 5857 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5858 return; 5859 } 5860 5861 android_native_rect_t crop; 5862 if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom) 5863 && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) { 5864 mCodec->mLastNativeWindowCrop = crop; 5865 status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop); 5866 ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err); 5867 } 5868 5869 int32_t dataSpace; 5870 if (msg->findInt32("dataspace", &dataSpace) 5871 && dataSpace != mCodec->mLastNativeWindowDataSpace) { 5872 status_t err = native_window_set_buffers_data_space( 5873 mCodec->mNativeWindow.get(), (android_dataspace)dataSpace); 5874 
mCodec->mLastNativeWindowDataSpace = dataSpace; 5875 ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err); 5876 } 5877 5878 int32_t render; 5879 if (mCodec->mNativeWindow != NULL 5880 && msg->findInt32("render", &render) && render != 0 5881 && info->mData != NULL && info->mData->size() != 0) { 5882 ATRACE_NAME("render"); 5883 // The client wants this buffer to be rendered. 5884 5885 // save buffers sent to the surface so we can get render time when they return 5886 int64_t mediaTimeUs = -1; 5887 info->mData->meta()->findInt64("timeUs", &mediaTimeUs); 5888 if (mediaTimeUs >= 0) { 5889 mCodec->mRenderTracker.onFrameQueued( 5890 mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd))); 5891 } 5892 5893 int64_t timestampNs = 0; 5894 if (!msg->findInt64("timestampNs", ×tampNs)) { 5895 // use media timestamp if client did not request a specific render timestamp 5896 if (info->mData->meta()->findInt64("timeUs", ×tampNs)) { 5897 ALOGV("using buffer PTS of %lld", (long long)timestampNs); 5898 timestampNs *= 1000; 5899 } 5900 } 5901 5902 status_t err; 5903 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); 5904 ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err); 5905 5906 info->checkReadFence("onOutputBufferDrained before queueBuffer"); 5907 err = mCodec->mNativeWindow->queueBuffer( 5908 mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 5909 info->mFenceFd = -1; 5910 if (err == OK) { 5911 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 5912 } else { 5913 ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err); 5914 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5915 info->mStatus = BufferInfo::OWNED_BY_US; 5916 // keeping read fence as write fence to avoid clobbering 5917 info->mIsReadFence = false; 5918 } 5919 } else { 5920 if (mCodec->mNativeWindow != NULL && 5921 (info->mData == NULL || info->mData->size() != 0)) { 5922 // move read fence into 
write fence to avoid clobbering 5923 info->mIsReadFence = false; 5924 ATRACE_NAME("frame-drop"); 5925 } 5926 info->mStatus = BufferInfo::OWNED_BY_US; 5927 } 5928 5929 PortMode mode = getPortMode(kPortIndexOutput); 5930 5931 switch (mode) { 5932 case KEEP_BUFFERS: 5933 { 5934 // XXX fishy, revisit!!! What about the FREE_BUFFERS case below? 5935 5936 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 5937 // We cannot resubmit the buffer we just rendered, dequeue 5938 // the spare instead. 5939 5940 info = mCodec->dequeueBufferFromNativeWindow(); 5941 } 5942 break; 5943 } 5944 5945 case RESUBMIT_BUFFERS: 5946 { 5947 if (!mCodec->mPortEOS[kPortIndexOutput]) { 5948 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 5949 // We cannot resubmit the buffer we just rendered, dequeue 5950 // the spare instead. 5951 5952 info = mCodec->dequeueBufferFromNativeWindow(); 5953 } 5954 5955 if (info != NULL) { 5956 ALOGV("[%s] calling fillBuffer %u", 5957 mCodec->mComponentName.c_str(), info->mBufferID); 5958 info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS"); 5959 status_t err = mCodec->mOMX->fillBuffer( 5960 mCodec->mNode, info->mBufferID, info->mFenceFd); 5961 info->mFenceFd = -1; 5962 if (err == OK) { 5963 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5964 } else { 5965 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5966 } 5967 } 5968 } 5969 break; 5970 } 5971 5972 case FREE_BUFFERS: 5973 { 5974 status_t err = mCodec->freeBuffer(kPortIndexOutput, index); 5975 if (err != OK) { 5976 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5977 } 5978 break; 5979 } 5980 5981 default: 5982 ALOGE("Invalid port mode: %d", mode); 5983 return; 5984 } 5985} 5986 5987//////////////////////////////////////////////////////////////////////////////// 5988 5989ACodec::UninitializedState::UninitializedState(ACodec *codec) 5990 : BaseState(codec) { 5991} 5992 5993void ACodec::UninitializedState::stateEntered() { 5994 ALOGV("Now 
uninitialized"); 5995 5996 if (mDeathNotifier != NULL) { 5997 mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier); 5998 mDeathNotifier.clear(); 5999 } 6000 6001 mCodec->mUsingNativeWindow = false; 6002 mCodec->mNativeWindow.clear(); 6003 mCodec->mNativeWindowUsageBits = 0; 6004 mCodec->mNode = 0; 6005 mCodec->mOMX.clear(); 6006 mCodec->mQuirks = 0; 6007 mCodec->mFlags = 0; 6008 mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; 6009 mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; 6010 mCodec->mConverter[0].clear(); 6011 mCodec->mConverter[1].clear(); 6012 mCodec->mComponentName.clear(); 6013} 6014 6015bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 6016 bool handled = false; 6017 6018 switch (msg->what()) { 6019 case ACodec::kWhatSetup: 6020 { 6021 onSetup(msg); 6022 6023 handled = true; 6024 break; 6025 } 6026 6027 case ACodec::kWhatAllocateComponent: 6028 { 6029 onAllocateComponent(msg); 6030 handled = true; 6031 break; 6032 } 6033 6034 case ACodec::kWhatShutdown: 6035 { 6036 int32_t keepComponentAllocated; 6037 CHECK(msg->findInt32( 6038 "keepComponentAllocated", &keepComponentAllocated)); 6039 ALOGW_IF(keepComponentAllocated, 6040 "cannot keep component allocated on shutdown in Uninitialized state"); 6041 6042 sp<AMessage> notify = mCodec->mNotify->dup(); 6043 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6044 notify->post(); 6045 6046 handled = true; 6047 break; 6048 } 6049 6050 case ACodec::kWhatFlush: 6051 { 6052 sp<AMessage> notify = mCodec->mNotify->dup(); 6053 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6054 notify->post(); 6055 6056 handled = true; 6057 break; 6058 } 6059 6060 case ACodec::kWhatReleaseCodecInstance: 6061 { 6062 // nothing to do, as we have already signaled shutdown 6063 handled = true; 6064 break; 6065 } 6066 6067 default: 6068 return BaseState::onMessageReceived(msg); 6069 } 6070 6071 return handled; 6072} 6073 6074void ACodec::UninitializedState::onSetup( 6075 
const sp<AMessage> &msg) { 6076 if (onAllocateComponent(msg) 6077 && mCodec->mLoadedState->onConfigureComponent(msg)) { 6078 mCodec->mLoadedState->onStart(); 6079 } 6080} 6081
// Connect to the OMX service and allocate a node for a matching component.
// The candidate list comes either from an explicit "componentName" in msg or
// from MediaCodecList lookup by "mime"/"encoder". Returns false (after
// signalling an error to the client) if no component could be instantiated.
 6082bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { 6083 ALOGV("onAllocateComponent"); 6084 6085 CHECK(mCodec->mNode == 0); 6086 6087 OMXClient client; 6088 if (client.connect() != OK) { 6089 mCodec->signalError(OMX_ErrorUndefined, NO_INIT); 6090 return false; 6091 } 6092 6093 sp<IOMX> omx = client.interface(); 6094
// This message is delivered if the OMX service dies (see DeathNotifier below).
 6095 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec); 6096 6097 Vector<AString> matchingCodecs; 6098 6099 AString mime; 6100 6101 AString componentName; 6102 uint32_t quirks = 0; 6103 int32_t encoder = false; 6104 if (msg->findString("componentName", &componentName)) { 6105 sp<IMediaCodecList> list = MediaCodecList::getInstance(); 6106 if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) { 6107 matchingCodecs.add(componentName); 6108 } 6109 } else { 6110 CHECK(msg->findString("mime", &mime)); 6111 6112 if (!msg->findInt32("encoder", &encoder)) { 6113 encoder = false; 6114 } 6115 6116 MediaCodecList::findMatchingCodecs( 6117 mime.c_str(), 6118 encoder, // createEncoder 6119 0, // flags 6120 &matchingCodecs); 6121 } 6122 6123 sp<CodecObserver> observer = new CodecObserver; 6124 IOMX::node_id node = 0; 6125
// Try each candidate in order until one allocates. The calling thread's
// priority is temporarily raised to FOREGROUND around allocateNode and
// restored afterwards.
 6126 status_t err = NAME_NOT_FOUND; 6127 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size(); 6128 ++matchIndex) { 6129 componentName = matchingCodecs[matchIndex]; 6130 quirks = MediaCodecList::getQuirksFor(componentName.c_str()); 6131 6132 pid_t tid = gettid(); 6133 int prevPriority = androidGetThreadPriority(tid); 6134 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); 6135 err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node); 6136 androidSetThreadPriority(tid, prevPriority); 6137 6138 if (err == OK) { 6139 break; 6140 } else { 6141 ALOGW("Allocating component '%s' 
failed, try next one.", componentName.c_str()); 6142 } 6143 6144 node = 0; 6145 } 6146
// No candidate allocated a node: report the failure (with mime or explicit
// component name, whichever was requested) and signal the error upward.
 6147 if (node == 0) { 6148 if (!mime.empty()) { 6149 ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.", 6150 encoder ? "en" : "de", mime.c_str(), err); 6151 } else { 6152 ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err); 6153 } 6154 6155 mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err)); 6156 return false; 6157 } 6158
// Watch the node binder for death so kWhatOMXDied gets posted if the remote
// service goes away; a local binder needs no such notification.
 6159 mDeathNotifier = new DeathNotifier(notify); 6160 if (mCodec->mNodeBinder == NULL || 6161 mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) { 6162 // This was a local binder, if it dies so do we, we won't care 6163 // about any notifications in the afterlife. 6164 mDeathNotifier.clear(); 6165 } 6166 6167 notify = new AMessage(kWhatOMXMessageList, mCodec); 6168 observer->setNotificationMessage(notify); 6169 6170 mCodec->mComponentName = componentName; 6171 mCodec->mRenderTracker.setComponentName(componentName); 6172 mCodec->mFlags = 0; 6173
// ".secure" components get protected gralloc usage and blank-buffer pushing
// on shutdown.
 6174 if (componentName.endsWith(".secure")) { 6175 mCodec->mFlags |= kFlagIsSecure; 6176 mCodec->mFlags |= kFlagIsGrallocUsageProtected; 6177 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 6178 } 6179 6180 mCodec->mQuirks = quirks; 6181 mCodec->mOMX = omx; 6182 mCodec->mNode = node; 6183
// Tell the client which component was picked, then move to Loaded.
 6184 { 6185 sp<AMessage> notify = mCodec->mNotify->dup(); 6186 notify->setInt32("what", CodecBase::kWhatComponentAllocated); 6187 notify->setString("componentName", mCodec->mComponentName.c_str()); 6188 notify->post(); 6189 } 6190 6191 mCodec->changeState(mCodec->mLoadedState); 6192 6193 return true; 6194} 6195 6196////////////////////////////////////////////////////////////////////////////////
// LoadedState: node allocated but not configured/started.
 6197 6198ACodec::LoadedState::LoadedState(ACodec *codec) 6199 : BaseState(codec) { 6200} 6201
// On entry, reset per-session state (continues on the next listing line).
 6202void ACodec::LoadedState::stateEntered() { 6203 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); 6204 6205 mCodec->mPortEOS[kPortIndexInput] = 6206 
mCodec->mPortEOS[kPortIndexOutput] = false; 6207 6208 mCodec->mInputEOSResult = OK; 6209 6210 mCodec->mDequeueCounter = 0; 6211 mCodec->mMetadataBuffersToSubmit = 0; 6212 mCodec->mRepeatFrameDelayUs = -1ll; 6213 mCodec->mInputFormat.clear(); 6214 mCodec->mOutputFormat.clear(); 6215 mCodec->mBaseOutputFormat.clear(); 6216
// If a shutdown was requested while we were transitioning here, finish it
// now that we have reached Loaded.
 6217 if (mCodec->mShutdownInProgress) { 6218 bool keepComponentAllocated = mCodec->mKeepComponentAllocated; 6219 6220 mCodec->mShutdownInProgress = false; 6221 mCodec->mKeepComponentAllocated = false; 6222 6223 onShutdown(keepComponentAllocated); 6224 } 6225 mCodec->mExplicitShutdown = false; 6226 6227 mCodec->processDeferredMessages(); 6228} 6229
// Shutdown from Loaded: free the OMX node (unless the client asked to keep
// the component allocated) and ack an explicit shutdown exactly once.
 6230void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { 6231 if (!keepComponentAllocated) { 6232 (void)mCodec->mOMX->freeNode(mCodec->mNode); 6233 6234 mCodec->changeState(mCodec->mUninitializedState); 6235 } 6236 6237 if (mCodec->mExplicitShutdown) { 6238 sp<AMessage> notify = mCodec->mNotify->dup(); 6239 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6240 notify->post(); 6241 mCodec->mExplicitShutdown = false; 6242 } 6243} 6244
// Message dispatch while Loaded: configure, create/set input surface, start,
// shutdown and flush are all actionable here.
 6245bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { 6246 bool handled = false; 6247 6248 switch (msg->what()) { 6249 case ACodec::kWhatConfigureComponent: 6250 { 6251 onConfigureComponent(msg); 6252 handled = true; 6253 break; 6254 } 6255 6256 case ACodec::kWhatCreateInputSurface: 6257 { 6258 onCreateInputSurface(msg); 6259 handled = true; 6260 break; 6261 } 6262 6263 case ACodec::kWhatSetInputSurface: 6264 { 6265 onSetInputSurface(msg); 6266 handled = true; 6267 break; 6268 } 6269 6270 case ACodec::kWhatStart: 6271 { 6272 onStart(); 6273 handled = true; 6274 break; 6275 } 6276 6277 case ACodec::kWhatShutdown: 6278 { 6279 int32_t keepComponentAllocated; 6280 CHECK(msg->findInt32( 6281 "keepComponentAllocated", &keepComponentAllocated)); 6282 6283 mCodec->mExplicitShutdown = true; 6284 onShutdown(keepComponentAllocated); 6285 
6286 handled = true; 6287 break; 6288 } 6289
// Nothing is running yet, so a flush completes trivially.
 6290 case ACodec::kWhatFlush: 6291 { 6292 sp<AMessage> notify = mCodec->mNotify->dup(); 6293 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6294 notify->post(); 6295 6296 handled = true; 6297 break; 6298 } 6299 6300 default: 6301 return BaseState::onMessageReceived(msg); 6302 } 6303 6304 return handled; 6305} 6306
// Configure the allocated component from the "mime" entry in msg; on success
// notify the client with the resulting input/output formats, on failure
// signal the error and return false.
 6307bool ACodec::LoadedState::onConfigureComponent( 6308 const sp<AMessage> &msg) { 6309 ALOGV("onConfigureComponent"); 6310 6311 CHECK(mCodec->mNode != 0); 6312 6313 status_t err = OK; 6314 AString mime; 6315 if (!msg->findString("mime", &mime)) { 6316 err = BAD_VALUE; 6317 } else { 6318 err = mCodec->configureCodec(mime.c_str(), msg); 6319 } 6320 if (err != OK) { 6321 ALOGE("[%s] configureCodec returning error %d", 6322 mCodec->mComponentName.c_str(), err); 6323 6324 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6325 return false; 6326 } 6327 6328 { 6329 sp<AMessage> notify = mCodec->mNotify->dup(); 6330 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 6331 notify->setMessage("input-format", mCodec->mInputFormat); 6332 notify->setMessage("output-format", mCodec->mOutputFormat); 6333 notify->post(); 6334 } 6335 6336 return true; 6337} 6338
// Push the configured input-surface options (repeat-frame delay, max pts gap,
// max fps, time lapse, suspend, color aspects) down to the component via
// internal options; returns the first error encountered.
 6339status_t ACodec::LoadedState::setupInputSurface() { 6340 status_t err = OK; 6341 6342 if (mCodec->mRepeatFrameDelayUs > 0ll) { 6343 err = mCodec->mOMX->setInternalOption( 6344 mCodec->mNode, 6345 kPortIndexInput, 6346 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 6347 &mCodec->mRepeatFrameDelayUs, 6348 sizeof(mCodec->mRepeatFrameDelayUs)); 6349 6350 if (err != OK) { 6351 ALOGE("[%s] Unable to configure option to repeat previous " 6352 "frames (err %d)", 6353 mCodec->mComponentName.c_str(), 6354 err); 6355 return err; 6356 } 6357 } 6358 6359 if (mCodec->mMaxPtsGapUs > 0ll) { 6360 err = mCodec->mOMX->setInternalOption( 6361 mCodec->mNode, 6362 kPortIndexInput, 6363 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 6364 
&mCodec->mMaxPtsGapUs, 6365 sizeof(mCodec->mMaxPtsGapUs)); 6366 6367 if (err != OK) { 6368 ALOGE("[%s] Unable to configure max timestamp gap (err %d)", 6369 mCodec->mComponentName.c_str(), 6370 err); 6371 return err; 6372 } 6373 } 6374
// Optional frame-rate cap on the input surface.
 6375 if (mCodec->mMaxFps > 0) { 6376 err = mCodec->mOMX->setInternalOption( 6377 mCodec->mNode, 6378 kPortIndexInput, 6379 IOMX::INTERNAL_OPTION_MAX_FPS, 6380 &mCodec->mMaxFps, 6381 sizeof(mCodec->mMaxFps)); 6382 6383 if (err != OK) { 6384 ALOGE("[%s] Unable to configure max fps (err %d)", 6385 mCodec->mComponentName.c_str(), 6386 err); 6387 return err; 6388 } 6389 } 6390
// Time-lapse capture: pass [timePerFrameUs, timePerCaptureUs] as a pair.
 6391 if (mCodec->mTimePerCaptureUs > 0ll 6392 && mCodec->mTimePerFrameUs > 0ll) { 6393 int64_t timeLapse[2]; 6394 timeLapse[0] = mCodec->mTimePerFrameUs; 6395 timeLapse[1] = mCodec->mTimePerCaptureUs; 6396 err = mCodec->mOMX->setInternalOption( 6397 mCodec->mNode, 6398 kPortIndexInput, 6399 IOMX::INTERNAL_OPTION_TIME_LAPSE, 6400 &timeLapse[0], 6401 sizeof(timeLapse)); 6402 6403 if (err != OK) { 6404 ALOGE("[%s] Unable to configure time lapse (err %d)", 6405 mCodec->mComponentName.c_str(), 6406 err); 6407 return err; 6408 } 6409 } 6410
// Start the surface in suspended state if the client asked for it.
 6411 if (mCodec->mCreateInputBuffersSuspended) { 6412 bool suspend = true; 6413 err = mCodec->mOMX->setInternalOption( 6414 mCodec->mNode, 6415 kPortIndexInput, 6416 IOMX::INTERNAL_OPTION_SUSPEND, 6417 &suspend, 6418 sizeof(suspend)); 6419 6420 if (err != OK) { 6421 ALOGE("[%s] Unable to configure option to suspend (err %d)", 6422 mCodec->mComponentName.c_str(), 6423 err); 6424 return err; 6425 } 6426 } 6427
// Expose whether the consumer reads buffers with the CPU (best effort; a
// failed getParameter is simply ignored here).
 6428 uint32_t usageBits; 6429 if (mCodec->mOMX->getParameter( 6430 mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 6431 &usageBits, sizeof(usageBits)) == OK) { 6432 mCodec->mInputFormat->setInt32( 6433 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 6434 } 6435 6436 sp<ABuffer> colorAspectsBuffer; 6437 if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) { 6438 err 
= mCodec->mOMX->setInternalOption( 6439 mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS, 6440 colorAspectsBuffer->base(), colorAspectsBuffer->capacity()); 6441 if (err != OK) { 6442 ALOGE("[%s] Unable to configure color aspects (err %d)", 6443 mCodec->mComponentName.c_str(), err); 6444 return err; 6445 } 6446 } 6447 return OK; 6448} 6449
// Create a fresh input surface for an encoder: set up color aspects /
// dataspace, ask the component for a buffer producer, apply the surface
// options, then report the producer (or an error code) to the client.
 6450void ACodec::LoadedState::onCreateInputSurface( 6451 const sp<AMessage> & /* msg */) { 6452 ALOGV("onCreateInputSurface"); 6453 6454 sp<AMessage> notify = mCodec->mNotify->dup(); 6455 notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); 6456 6457 android_dataspace dataSpace; 6458 status_t err = 6459 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6460 notify->setMessage("input-format", mCodec->mInputFormat); 6461 notify->setMessage("output-format", mCodec->mOutputFormat); 6462 6463 sp<IGraphicBufferProducer> bufferProducer; 6464 if (err == OK) { 6465 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6466 err = mCodec->mOMX->createInputSurface( 6467 mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, 6468 &mCodec->mInputMetadataType); 6469 // framework uses ANW buffers internally instead of gralloc handles 6470 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6471 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6472 } 6473 } 6474 6475 if (err == OK) { 6476 err = setupInputSurface(); 6477 } 6478 6479 if (err == OK) { 6480 notify->setObject("input-surface", 6481 new BufferProducerWrapper(bufferProducer)); 6482 } else { 6483 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6484 // the error through because it's in the "configured" state. We 6485 // send a kWhatInputSurfaceCreated with an error value instead. 
6486 ALOGE("[%s] onCreateInputSurface returning error %d", 6487 mCodec->mComponentName.c_str(), err); 6488 notify->setInt32("err", err); 6489 } 6490 notify->post(); 6491} 6492
// Attach a client-supplied persistent input surface: hand its buffer consumer
// to the component via setInputSurface, propagate the chosen dataspace to the
// consumer, apply the surface options, and report success or the error code.
 6493void ACodec::LoadedState::onSetInputSurface( 6494 const sp<AMessage> &msg) { 6495 ALOGV("onSetInputSurface"); 6496 6497 sp<AMessage> notify = mCodec->mNotify->dup(); 6498 notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted); 6499 6500 sp<RefBase> obj; 6501 CHECK(msg->findObject("input-surface", &obj)); 6502 sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get()); 6503 6504 android_dataspace dataSpace; 6505 status_t err = 6506 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6507 notify->setMessage("input-format", mCodec->mInputFormat); 6508 notify->setMessage("output-format", mCodec->mOutputFormat); 6509 6510 if (err == OK) { 6511 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6512 err = mCodec->mOMX->setInputSurface( 6513 mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(), 6514 &mCodec->mInputMetadataType); 6515 // framework uses ANW buffers internally instead of gralloc handles 6516 if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) { 6517 mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer; 6518 } 6519 } 6520 6521 if (err == OK) { 6522 surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace); 6523 err = setupInputSurface(); 6524 } 6525 6526 if (err != OK) { 6527 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6528 // the error through because it's in the "configured" state. We 6529 // send a kWhatInputSurfaceAccepted with an error value instead. 
6530 ALOGE("[%s] onSetInputSurface returning error %d", 6531 mCodec->mComponentName.c_str(), err); 6532 notify->setInt32("err", err); 6533 } 6534 notify->post(); 6535} 6536
// Kick off the Loaded->Idle OMX state transition; buffers get allocated when
// LoadedToIdleState is entered.
 6537void ACodec::LoadedState::onStart() { 6538 ALOGV("onStart"); 6539 6540 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 6541 if (err != OK) { 6542 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6543 } else { 6544 mCodec->changeState(mCodec->mLoadedToIdleState); 6545 } 6546} 6547 6548////////////////////////////////////////////////////////////////////////////////
// LoadedToIdleState: waiting for the component to reach OMX_StateIdle.
 6549 6550ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec) 6551 : BaseState(codec) { 6552} 6553
// On entry allocate input+output buffers; on failure roll the component back
// to Loaded, freeing any buffers we still own, and signal the error.
 6554void ACodec::LoadedToIdleState::stateEntered() { 6555 ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str()); 6556 6557 status_t err; 6558 if ((err = allocateBuffers()) != OK) { 6559 ALOGE("Failed to allocate buffers after transitioning to IDLE state " 6560 "(error 0x%08x)", 6561 err); 6562 6563 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6564 6565 mCodec->mOMX->sendCommand( 6566 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 6567 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) { 6568 mCodec->freeBuffersOnPort(kPortIndexInput); 6569 } 6570 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) { 6571 mCodec->freeBuffersOnPort(kPortIndexOutput); 6572 } 6573 6574 mCodec->changeState(mCodec->mLoadedState); 6575 } 6576} 6577
// Allocate input buffers first, then output buffers; stop on first error.
 6578status_t ACodec::LoadedToIdleState::allocateBuffers() { 6579 status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput); 6580 6581 if (err != OK) { 6582 return err; 6583 } 6584 6585 return mCodec->allocateBuffersOnPort(kPortIndexOutput); 6586} 6587
// Mid-transition: defer setParameters/shutdown until a stable state.
 6588bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { 6589 switch (msg->what()) { 6590 case kWhatSetParameters: 6591 case kWhatShutdown: 6592 { 6593 mCodec->deferMessage(msg); 6594 return true; 6595 } 6596 6597 
case kWhatSignalEndOfInputStream: 6598 { 6599 mCodec->onSignalEndOfInputStream(); 6600 return true; 6601 } 6602 6603 case kWhatResume: 6604 { 6605 // We'll be active soon enough. 6606 return true; 6607 } 6608 6609 case kWhatFlush: 6610 { 6611 // We haven't even started yet, so we're flushed alright... 6612 sp<AMessage> notify = mCodec->mNotify->dup(); 6613 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6614 notify->post(); 6615 return true; 6616 } 6617 6618 default: 6619 return BaseState::onMessageReceived(msg); 6620 } 6621} 6622
// When the Idle transition completes, immediately request Executing and move
// to IdleToExecutingState; anything else completing here is a protocol error.
 6623bool ACodec::LoadedToIdleState::onOMXEvent( 6624 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6625 switch (event) { 6626 case OMX_EventCmdComplete: 6627 { 6628 status_t err = OK; 6629 if (data1 != (OMX_U32)OMX_CommandStateSet 6630 || data2 != (OMX_U32)OMX_StateIdle) { 6631 ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)", 6632 asString((OMX_COMMANDTYPE)data1), data1, 6633 asString((OMX_STATETYPE)data2), data2); 6634 err = FAILED_TRANSACTION; 6635 } 6636 6637 if (err == OK) { 6638 err = mCodec->mOMX->sendCommand( 6639 mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting); 6640 } 6641 6642 if (err != OK) { 6643 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6644 } else { 6645 mCodec->changeState(mCodec->mIdleToExecutingState); 6646 } 6647 6648 return true; 6649 } 6650 6651 default: 6652 return BaseState::onOMXEvent(event, data1, data2); 6653 } 6654} 6655 6656////////////////////////////////////////////////////////////////////////////////
// IdleToExecutingState: waiting for the component to reach OMX_StateExecuting.
 6657 6658ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec) 6659 : BaseState(codec) { 6660} 6661 6662void ACodec::IdleToExecutingState::stateEntered() { 6663 ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str()); 6664} 6665
// Same deferral policy as LoadedToIdleState while mid-transition.
 6666bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6667 switch (msg->what()) { 6668 case kWhatSetParameters: 6669 case kWhatShutdown: 6670 { 6671 
mCodec->deferMessage(msg); 6672 return true; 6673 } 6674 6675 case kWhatResume: 6676 { 6677 // We'll be active soon enough. 6678 return true; 6679 } 6680 6681 case kWhatFlush: 6682 { 6683 // We haven't even started yet, so we're flushed alright... 6684 sp<AMessage> notify = mCodec->mNotify->dup(); 6685 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6686 notify->post(); 6687 6688 return true; 6689 } 6690 6691 case kWhatSignalEndOfInputStream: 6692 { 6693 mCodec->onSignalEndOfInputStream(); 6694 return true; 6695 } 6696 6697 default: 6698 return BaseState::onMessageReceived(msg); 6699 } 6700} 6701
// Executing transition complete: resume() submits buffers, then we switch to
// the steady-state ExecutingState.
 6702bool ACodec::IdleToExecutingState::onOMXEvent( 6703 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6704 switch (event) { 6705 case OMX_EventCmdComplete: 6706 { 6707 if (data1 != (OMX_U32)OMX_CommandStateSet 6708 || data2 != (OMX_U32)OMX_StateExecuting) { 6709 ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)", 6710 asString((OMX_COMMANDTYPE)data1), data1, 6711 asString((OMX_STATETYPE)data2), data2); 6712 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6713 return true; 6714 } 6715 6716 mCodec->mExecutingState->resume(); 6717 mCodec->changeState(mCodec->mExecutingState); 6718 6719 return true; 6720 } 6721 6722 default: 6723 return BaseState::onOMXEvent(event, data1, data2); 6724 } 6725} 6726 6727////////////////////////////////////////////////////////////////////////////////
// ExecutingState: normal streaming operation; buffers are resubmitted on both
// ports.
 6728 6729ACodec::ExecutingState::ExecutingState(ACodec *codec) 6730 : BaseState(codec), 6731 mActive(false) { 6732} 6733 6734ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( 6735 OMX_U32 /* portIndex */) { 6736 return RESUBMIT_BUFFERS; 6737} 6738
// Metadata-mode output: submit one output metadata buffer per input buffer
// the component currently holds (continues on the next listing line).
 6739void ACodec::ExecutingState::submitOutputMetaBuffers() { 6740 // submit as many buffers as there are input buffers with the codec 6741 // in case we are in port reconfiguring 6742 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 6743 BufferInfo *info = 
&mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6744 6745 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 6746 if (mCodec->submitOutputMetadataBuffer() != OK) 6747 break; 6748 } 6749 } 6750 6751 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 6752 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 6753} 6754
// Hand every output buffer we (or nobody) own to the component via
// fillBuffer. Buffers held by the native window are skipped; any other
// ownership state is a bookkeeping error and aborts the loop.
 6755void ACodec::ExecutingState::submitRegularOutputBuffers() { 6756 bool failed = false; 6757 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 6758 BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i); 6759 6760 if (mCodec->mNativeWindow != NULL) { 6761 if (info->mStatus != BufferInfo::OWNED_BY_US 6762 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6763 ALOGE("buffers should be owned by us or the surface"); 6764 failed = true; 6765 break; 6766 } 6767 6768 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6769 continue; 6770 } 6771 } else { 6772 if (info->mStatus != BufferInfo::OWNED_BY_US) { 6773 ALOGE("buffers should be owned by us"); 6774 failed = true; 6775 break; 6776 } 6777 } 6778 6779 ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID); 6780
// The fence fd is transferred to the component with fillBuffer; clear ours.
 6781 info->checkWriteFence("submitRegularOutputBuffers"); 6782 status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 6783 info->mFenceFd = -1; 6784 if (err != OK) { 6785 failed = true; 6786 break; 6787 } 6788 6789 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6790 } 6791 6792 if (failed) { 6793 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6794 } 6795} 6796
// Submit regular output buffers always; metadata buffers only when decoding
// into metadata mode.
 6797void ACodec::ExecutingState::submitOutputBuffers() { 6798 submitRegularOutputBuffers(); 6799 if (mCodec->storingMetadataInDecodedBuffers()) { 6800 submitOutputMetaBuffers(); 6801 } 6802} 6803
// Become active: submit output buffers and post fill requests for every input
// buffer we own. Idempotent if already active (continues on next line).
 6804void ACodec::ExecutingState::resume() { 6805 if (mActive) { 6806 ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str()); 6807 return; 6808 } 6809 6810 
submitOutputBuffers(); 6811 6812 // Post all available input buffers 6813 if (mCodec->mBuffers[kPortIndexInput].size() == 0u) { 6814 ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str()); 6815 } 6816 6817 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) { 6818 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6819 if (info->mStatus == BufferInfo::OWNED_BY_US) { 6820 postFillThisBuffer(info); 6821 } 6822 } 6823 6824 mActive = true; 6825} 6826
// Reset render-timing tracking and replay any messages deferred during the
// preceding transition states.
 6827void ACodec::ExecutingState::stateEntered() { 6828 ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); 6829 6830 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 6831 mCodec->processDeferredMessages(); 6832} 6833
// Steady-state message dispatch: shutdown starts the Executing->Idle
// transition; flush issues OMX_CommandFlush on both ports.
 6834bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6835 bool handled = false; 6836 6837 switch (msg->what()) { 6838 case kWhatShutdown: 6839 { 6840 int32_t keepComponentAllocated; 6841 CHECK(msg->findInt32( 6842 "keepComponentAllocated", &keepComponentAllocated)); 6843 6844 mCodec->mShutdownInProgress = true; 6845 mCodec->mExplicitShutdown = true; 6846 mCodec->mKeepComponentAllocated = keepComponentAllocated; 6847 6848 mActive = false; 6849 6850 status_t err = mCodec->mOMX->sendCommand( 6851 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 6852 if (err != OK) { 6853 if (keepComponentAllocated) { 6854 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6855 } 6856 // TODO: do some recovery here. 
6857 } else { 6858 mCodec->changeState(mCodec->mExecutingToIdleState); 6859 } 6860 6861 handled = true; 6862 break; 6863 } 6864
// Flush both ports and wait for completion in FlushingState.
 6865 case kWhatFlush: 6866 { 6867 ALOGV("[%s] ExecutingState flushing now " 6868 "(codec owns %zu/%zu input, %zu/%zu output).", 6869 mCodec->mComponentName.c_str(), 6870 mCodec->countBuffersOwnedByComponent(kPortIndexInput), 6871 mCodec->mBuffers[kPortIndexInput].size(), 6872 mCodec->countBuffersOwnedByComponent(kPortIndexOutput), 6873 mCodec->mBuffers[kPortIndexOutput].size()); 6874 6875 mActive = false; 6876 6877 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL); 6878 if (err != OK) { 6879 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6880 } else { 6881 mCodec->changeState(mCodec->mFlushingState); 6882 } 6883 6884 handled = true; 6885 break; 6886 } 6887 6888 case kWhatResume: 6889 { 6890 resume(); 6891 6892 handled = true; 6893 break; 6894 } 6895 6896 case kWhatRequestIDRFrame: 6897 { 6898 status_t err = mCodec->requestIDRFrame(); 6899 if (err != OK) { 6900 ALOGW("Requesting an IDR frame failed."); 6901 } 6902 6903 handled = true; 6904 break; 6905 } 6906
// NOTE(review): "¶ms" below looks like mojibake for "&params" introduced by
// the listing extraction - verify against the pristine source file.
 6907 case kWhatSetParameters: 6908 { 6909 sp<AMessage> params; 6910 CHECK(msg->findMessage("params", ¶ms)); 6911 6912 status_t err = mCodec->setParameters(params); 6913
// If the caller attached a reply message, answer it with the result code.
 6914 sp<AMessage> reply; 6915 if (msg->findMessage("reply", &reply)) { 6916 reply->setInt32("err", err); 6917 reply->post(); 6918 } 6919 6920 handled = true; 6921 break; 6922 } 6923 6924 case ACodec::kWhatSignalEndOfInputStream: 6925 { 6926 mCodec->onSignalEndOfInputStream(); 6927 handled = true; 6928 break; 6929 } 6930 6931 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 6932 case kWhatSubmitOutputMetadataBufferIfEOS: 6933 { 6934 if (mCodec->mPortEOS[kPortIndexInput] && 6935 !mCodec->mPortEOS[kPortIndexOutput]) { 6936 status_t err = mCodec->submitOutputMetadataBuffer(); 6937 if (err == OK) { 6938 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 6939 } 
6940 } 6941 return true; 6942 } 6943 6944 default: 6945 handled = BaseState::onMessageReceived(msg); 6946 break; 6947 } 6948 6949 return handled; 6950} 6951
// Apply runtime parameters while executing. Each recognized key is pushed to
// the component; the first hard failure is returned, unrecognized keys are
// ignored.
// NOTE(review): "¶ms" in the signature below looks like mojibake for
// "&params" from the listing extraction - verify against the pristine source.
 6952status_t ACodec::setParameters(const sp<AMessage> ¶ms) {
// "video-bitrate": change the encoder's target bitrate via setConfig.
 6953 int32_t videoBitrate; 6954 if (params->findInt32("video-bitrate", &videoBitrate)) { 6955 OMX_VIDEO_CONFIG_BITRATETYPE configParams; 6956 InitOMXParams(&configParams); 6957 configParams.nPortIndex = kPortIndexOutput; 6958 configParams.nEncodeBitrate = videoBitrate; 6959 6960 status_t err = mOMX->setConfig( 6961 mNode, 6962 OMX_IndexConfigVideoBitrate, 6963 &configParams, 6964 sizeof(configParams)); 6965 6966 if (err != OK) { 6967 ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d", 6968 videoBitrate, err); 6969 6970 return err; 6971 } 6972 } 6973
// "skip-frames-before": input-surface start time in microseconds.
 6974 int64_t skipFramesBeforeUs; 6975 if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) { 6976 status_t err = 6977 mOMX->setInternalOption( 6978 mNode, 6979 kPortIndexInput, 6980 IOMX::INTERNAL_OPTION_START_TIME, 6981 &skipFramesBeforeUs, 6982 sizeof(skipFramesBeforeUs)); 6983 6984 if (err != OK) { 6985 ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err); 6986 return err; 6987 } 6988 } 6989
// "drop-input-frames": suspend/resume the input surface.
 6990 int32_t dropInputFrames; 6991 if (params->findInt32("drop-input-frames", &dropInputFrames)) { 6992 bool suspend = dropInputFrames != 0; 6993 6994 status_t err = 6995 mOMX->setInternalOption( 6996 mNode, 6997 kPortIndexInput, 6998 IOMX::INTERNAL_OPTION_SUSPEND, 6999 &suspend, 7000 sizeof(suspend)); 7001 7002 if (err != OK) { 7003 ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err); 7004 return err; 7005 } 7006 } 7007
// "request-sync": ask the encoder for an IDR frame; the value is unused.
 7008 int32_t dummy; 7009 if (params->findInt32("request-sync", &dummy)) { 7010 status_t err = requestIDRFrame(); 7011 7012 if (err != OK) { 7013 ALOGE("Requesting a sync frame failed w/ err %d", err); 7014 return err; 7015 } 7016 } 7017 7018 float rate; 7019 if (params->findFloat("operating-rate", &rate) && rate > 0) { 7020 status_t err = 
setOperatingRate(rate, mIsVideo); 7021 if (err != OK) { 7022 ALOGE("Failed to set parameter 'operating-rate' (err %d)", err); 7023 return err; 7024 } 7025 } 7026
// "intra-refresh-period" is best-effort: a failure is logged and swallowed
// because the key is optional.
 7027 int32_t intraRefreshPeriod = 0; 7028 if (params->findInt32("intra-refresh-period", &intraRefreshPeriod) 7029 && intraRefreshPeriod > 0) { 7030 status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false); 7031 if (err != OK) { 7032 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 7033 mComponentName.c_str()); 7034 err = OK; 7035 } 7036 } 7037 7038 return OK; 7039} 7040
// Signal input EOS to the component; the notification carries "err" only if
// the OMX call failed.
 7041void ACodec::onSignalEndOfInputStream() { 7042 sp<AMessage> notify = mNotify->dup(); 7043 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); 7044 7045 status_t err = mOMX->signalEndOfInputStream(mNode); 7046 if (err != OK) { 7047 notify->setInt32("err", err); 7048 } 7049 notify->post(); 7050} 7051
// Forward frame-rendered notifications to the render tracker.
 7052bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 7053 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7054 return true; 7055} 7056
// Port-settings-changed on the output port: always refresh the output format;
// a full reconfiguration (data2 == 0 or PortDefinition) additionally disables
// the output port and moves to OutputPortSettingsChangedState. Crop and
// intra-refresh changes need no buffer reallocation.
 7057bool ACodec::ExecutingState::onOMXEvent( 7058 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7059 switch (event) { 7060 case OMX_EventPortSettingsChanged: 7061 { 7062 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 7063 7064 mCodec->onOutputFormatChanged(); 7065 7066 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { 7067 mCodec->mMetadataBuffersToSubmit = 0; 7068 CHECK_EQ(mCodec->mOMX->sendCommand( 7069 mCodec->mNode, 7070 OMX_CommandPortDisable, kPortIndexOutput), 7071 (status_t)OK); 7072 7073 mCodec->freeOutputBuffersNotOwnedByComponent(); 7074 7075 mCodec->changeState(mCodec->mOutputPortSettingsChangedState); 7076 } else if (data2 != OMX_IndexConfigCommonOutputCrop 7077 && data2 != OMX_IndexConfigAndroidIntraRefresh) { 7078 ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x", 7079 mCodec->mComponentName.c_str(), data2); 7080 } 7081 7082 return true; 7083 } 7084
// EOS flag from the component is consumed silently here.
 7085 case OMX_EventBufferFlag: 7086 { 
7087 return true; 7088 } 7089 7090 default: 7091 return BaseState::onOMXEvent(event, data1, data2); 7092 } 7093} 7094 7095////////////////////////////////////////////////////////////////////////////////
// OutputPortSettingsChangedState: output port is being disabled, reallocated
// and re-enabled after a format change.
 7096 7097ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState( 7098 ACodec *codec) 7099 : BaseState(codec) { 7100}
// Output buffers drained during reconfiguration are freed; input buffers keep
// flowing back to the component.
 7101 7102ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode( 7103 OMX_U32 portIndex) { 7104 if (portIndex == kPortIndexOutput) { 7105 return FREE_BUFFERS; 7106 } 7107 7108 CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput); 7109 7110 return RESUBMIT_BUFFERS; 7111} 7112
// Flush/shutdown/resume/setParameters are all deferred until the port has
// been re-enabled and we are back in ExecutingState.
 7113bool ACodec::OutputPortSettingsChangedState::onMessageReceived( 7114 const sp<AMessage> &msg) { 7115 bool handled = false; 7116 7117 switch (msg->what()) { 7118 case kWhatFlush: 7119 case kWhatShutdown: 7120 case kWhatResume: 7121 case kWhatSetParameters: 7122 { 7123 if (msg->what() == kWhatResume) { 7124 ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str()); 7125 } 7126 7127 mCodec->deferMessage(msg); 7128 handled = true; 7129 break; 7130 } 7131 7132 default: 7133 handled = BaseState::onMessageReceived(msg); 7134 break; 7135 } 7136 7137 return handled; 7138} 7139 7140void ACodec::OutputPortSettingsChangedState::stateEntered() { 7141 ALOGV("[%s] Now handling output port settings change", 7142 mCodec->mComponentName.c_str()); 7143} 7144
// Render notifications continue to be tracked during reconfiguration.
 7145bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered( 7146 int64_t mediaTimeUs, nsecs_t systemNano) { 7147 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7148 return true; 7149} 7150
// Handle the two command completions of the reconfiguration hand-shake:
// first PortDisable (then re-enable + reallocate), later PortEnable
// (continues over the next listing lines).
 7151bool ACodec::OutputPortSettingsChangedState::onOMXEvent( 7152 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7153 switch (event) { 7154 case OMX_EventCmdComplete: 7155 { 7156 if (data1 == (OMX_U32)OMX_CommandPortDisable) { 7157 if (data2 != (OMX_U32)kPortIndexOutput) { 7158 ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2); 7159 return false; 7160 } 7161 7162 ALOGV("[%s] 
Output port now disabled.", mCodec->mComponentName.c_str()); 7163
// The disabled port must hold no buffers; release its memory dealer, then
// re-enable the port and allocate fresh output buffers for the new format.
 7164 status_t err = OK; 7165 if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) { 7166 ALOGE("disabled port should be empty, but has %zu buffers", 7167 mCodec->mBuffers[kPortIndexOutput].size()); 7168 err = FAILED_TRANSACTION; 7169 } else { 7170 mCodec->mDealer[kPortIndexOutput].clear(); 7171 } 7172 7173 if (err == OK) { 7174 err = mCodec->mOMX->sendCommand( 7175 mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput); 7176 } 7177 7178 if (err == OK) { 7179 err = mCodec->allocateBuffersOnPort(kPortIndexOutput); 7180 ALOGE_IF(err != OK, "Failed to allocate output port buffers after port " 7181 "reconfiguration: (%d)", err); 7182 } 7183
// On failure, force a shutdown-to-Loaded as the only recovery path (see the
// original comment below).
 7184 if (err != OK) { 7185 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 7186 7187 // This is technically not correct, but appears to be 7188 // the only way to free the component instance. 7189 // Controlled transitioning from excecuting->idle 7190 // and idle->loaded seem impossible probably because 7191 // the output port never finishes re-enabling. 
7192 mCodec->mShutdownInProgress = true; 7193 mCodec->mKeepComponentAllocated = false; 7194 mCodec->changeState(mCodec->mLoadedState); 7195 } 7196 7197 return true; 7198 } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { 7199 if (data2 != (OMX_U32)kPortIndexOutput) { 7200 ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2); 7201 return false; 7202 } 7203 7204 ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str()); 7205 7206 if (mCodec->mExecutingState->active()) { 7207 mCodec->mExecutingState->submitOutputBuffers(); 7208 } 7209 7210 mCodec->changeState(mCodec->mExecutingState); 7211 7212 return true; 7213 } 7214 7215 return false; 7216 } 7217 7218 default: 7219 return false; 7220 } 7221} 7222 7223//////////////////////////////////////////////////////////////////////////////// 7224 7225ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec) 7226 : BaseState(codec), 7227 mComponentNowIdle(false) { 7228} 7229 7230bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) { 7231 bool handled = false; 7232 7233 switch (msg->what()) { 7234 case kWhatFlush: 7235 { 7236 // Don't send me a flush request if you previously wanted me 7237 // to shutdown. 7238 ALOGW("Ignoring flush request in ExecutingToIdleState"); 7239 break; 7240 } 7241 7242 case kWhatShutdown: 7243 { 7244 // We're already doing that... 
7245 7246 handled = true; 7247 break; 7248 } 7249 7250 default: 7251 handled = BaseState::onMessageReceived(msg); 7252 break; 7253 } 7254 7255 return handled; 7256} 7257 7258void ACodec::ExecutingToIdleState::stateEntered() { 7259 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); 7260 7261 mComponentNowIdle = false; 7262 mCodec->mLastOutputFormat.clear(); 7263} 7264 7265bool ACodec::ExecutingToIdleState::onOMXEvent( 7266 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7267 switch (event) { 7268 case OMX_EventCmdComplete: 7269 { 7270 if (data1 != (OMX_U32)OMX_CommandStateSet 7271 || data2 != (OMX_U32)OMX_StateIdle) { 7272 ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)", 7273 asString((OMX_COMMANDTYPE)data1), data1, 7274 asString((OMX_STATETYPE)data2), data2); 7275 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7276 return true; 7277 } 7278 7279 mComponentNowIdle = true; 7280 7281 changeStateIfWeOwnAllBuffers(); 7282 7283 return true; 7284 } 7285 7286 case OMX_EventPortSettingsChanged: 7287 case OMX_EventBufferFlag: 7288 { 7289 // We're shutting down and don't care about this anymore. 7290 return true; 7291 } 7292 7293 default: 7294 return BaseState::onOMXEvent(event, data1, data2); 7295 } 7296} 7297 7298void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { 7299 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { 7300 status_t err = mCodec->mOMX->sendCommand( 7301 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 7302 if (err == OK) { 7303 err = mCodec->freeBuffersOnPort(kPortIndexInput); 7304 status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput); 7305 if (err == OK) { 7306 err = err2; 7307 } 7308 } 7309 7310 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) 7311 && mCodec->mNativeWindow != NULL) { 7312 // We push enough 1x1 blank buffers to ensure that one of 7313 // them has made it to the display. 
This allows the OMX 7314 // component teardown to zero out any protected buffers 7315 // without the risk of scanning out one of those buffers. 7316 pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get()); 7317 } 7318 7319 if (err != OK) { 7320 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7321 return; 7322 } 7323 7324 mCodec->changeState(mCodec->mIdleToLoadedState); 7325 } 7326} 7327 7328void ACodec::ExecutingToIdleState::onInputBufferFilled( 7329 const sp<AMessage> &msg) { 7330 BaseState::onInputBufferFilled(msg); 7331 7332 changeStateIfWeOwnAllBuffers(); 7333} 7334 7335void ACodec::ExecutingToIdleState::onOutputBufferDrained( 7336 const sp<AMessage> &msg) { 7337 BaseState::onOutputBufferDrained(msg); 7338 7339 changeStateIfWeOwnAllBuffers(); 7340} 7341 7342//////////////////////////////////////////////////////////////////////////////// 7343 7344ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) 7345 : BaseState(codec) { 7346} 7347 7348bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { 7349 bool handled = false; 7350 7351 switch (msg->what()) { 7352 case kWhatShutdown: 7353 { 7354 // We're already doing that... 7355 7356 handled = true; 7357 break; 7358 } 7359 7360 case kWhatFlush: 7361 { 7362 // Don't send me a flush request if you previously wanted me 7363 // to shutdown. 
7364 ALOGE("Got flush request in IdleToLoadedState"); 7365 break; 7366 } 7367 7368 default: 7369 handled = BaseState::onMessageReceived(msg); 7370 break; 7371 } 7372 7373 return handled; 7374} 7375 7376void ACodec::IdleToLoadedState::stateEntered() { 7377 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7378} 7379 7380bool ACodec::IdleToLoadedState::onOMXEvent( 7381 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7382 switch (event) { 7383 case OMX_EventCmdComplete: 7384 { 7385 if (data1 != (OMX_U32)OMX_CommandStateSet 7386 || data2 != (OMX_U32)OMX_StateLoaded) { 7387 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7388 asString((OMX_COMMANDTYPE)data1), data1, 7389 asString((OMX_STATETYPE)data2), data2); 7390 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7391 return true; 7392 } 7393 7394 mCodec->changeState(mCodec->mLoadedState); 7395 7396 return true; 7397 } 7398 7399 default: 7400 return BaseState::onOMXEvent(event, data1, data2); 7401 } 7402} 7403 7404//////////////////////////////////////////////////////////////////////////////// 7405 7406ACodec::FlushingState::FlushingState(ACodec *codec) 7407 : BaseState(codec) { 7408} 7409 7410void ACodec::FlushingState::stateEntered() { 7411 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7412 7413 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7414} 7415 7416bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7417 bool handled = false; 7418 7419 switch (msg->what()) { 7420 case kWhatShutdown: 7421 { 7422 mCodec->deferMessage(msg); 7423 break; 7424 } 7425 7426 case kWhatFlush: 7427 { 7428 // We're already doing this right now. 
7429 handled = true; 7430 break; 7431 } 7432 7433 default: 7434 handled = BaseState::onMessageReceived(msg); 7435 break; 7436 } 7437 7438 return handled; 7439} 7440 7441bool ACodec::FlushingState::onOMXEvent( 7442 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7443 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7444 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7445 7446 switch (event) { 7447 case OMX_EventCmdComplete: 7448 { 7449 if (data1 != (OMX_U32)OMX_CommandFlush) { 7450 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7451 asString((OMX_COMMANDTYPE)data1), data1, data2); 7452 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7453 return true; 7454 } 7455 7456 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7457 if (mFlushComplete[data2]) { 7458 ALOGW("Flush already completed for %s port", 7459 data2 == kPortIndexInput ? "input" : "output"); 7460 return true; 7461 } 7462 mFlushComplete[data2] = true; 7463 7464 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7465 changeStateIfWeOwnAllBuffers(); 7466 } 7467 } else if (data2 == OMX_ALL) { 7468 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7469 ALOGW("received flush complete event for OMX_ALL before ports have been" 7470 "flushed (%d/%d)", 7471 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7472 return false; 7473 } 7474 7475 changeStateIfWeOwnAllBuffers(); 7476 } else { 7477 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7478 } 7479 7480 return true; 7481 } 7482 7483 case OMX_EventPortSettingsChanged: 7484 { 7485 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7486 msg->setInt32("type", omx_message::EVENT); 7487 msg->setInt32("node", mCodec->mNode); 7488 msg->setInt32("event", event); 7489 msg->setInt32("data1", data1); 7490 msg->setInt32("data2", data2); 7491 7492 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7493 
mCodec->mComponentName.c_str()); 7494 7495 mCodec->deferMessage(msg); 7496 7497 return true; 7498 } 7499 7500 default: 7501 return BaseState::onOMXEvent(event, data1, data2); 7502 } 7503 7504 return true; 7505} 7506 7507void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7508 BaseState::onOutputBufferDrained(msg); 7509 7510 changeStateIfWeOwnAllBuffers(); 7511} 7512 7513void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7514 BaseState::onInputBufferFilled(msg); 7515 7516 changeStateIfWeOwnAllBuffers(); 7517} 7518 7519void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7520 if (mFlushComplete[kPortIndexInput] 7521 && mFlushComplete[kPortIndexOutput] 7522 && mCodec->allYourBuffersAreBelongToUs()) { 7523 // We now own all buffers except possibly those still queued with 7524 // the native window for rendering. Let's get those back as well. 7525 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7526 7527 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7528 7529 sp<AMessage> notify = mCodec->mNotify->dup(); 7530 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7531 notify->post(); 7532 7533 mCodec->mPortEOS[kPortIndexInput] = 7534 mCodec->mPortEOS[kPortIndexOutput] = false; 7535 7536 mCodec->mInputEOSResult = OK; 7537 7538 if (mCodec->mSkipCutBuffer != NULL) { 7539 mCodec->mSkipCutBuffer->clear(); 7540 } 7541 7542 mCodec->changeState(mCodec->mExecutingState); 7543 } 7544} 7545 7546status_t ACodec::queryCapabilities( 7547 const AString &name, const AString &mime, bool isEncoder, 7548 sp<MediaCodecInfo::Capabilities> *caps) { 7549 (*caps).clear(); 7550 const char *role = GetComponentRole(isEncoder, mime.c_str()); 7551 if (role == NULL) { 7552 return BAD_VALUE; 7553 } 7554 7555 OMXClient client; 7556 status_t err = client.connect(); 7557 if (err != OK) { 7558 return err; 7559 } 7560 7561 sp<IOMX> omx = client.interface(); 7562 sp<CodecObserver> observer = new CodecObserver; 7563 
IOMX::node_id node = 0; 7564 7565 err = omx->allocateNode(name.c_str(), observer, NULL, &node); 7566 if (err != OK) { 7567 client.disconnect(); 7568 return err; 7569 } 7570 7571 err = SetComponentRole(omx, node, role); 7572 if (err != OK) { 7573 omx->freeNode(node); 7574 client.disconnect(); 7575 return err; 7576 } 7577 7578 sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder(); 7579 bool isVideo = mime.startsWithIgnoreCase("video/"); 7580 7581 if (isVideo) { 7582 OMX_VIDEO_PARAM_PROFILELEVELTYPE param; 7583 InitOMXParams(¶m); 7584 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7585 7586 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7587 param.nProfileIndex = index; 7588 status_t err = omx->getParameter( 7589 node, OMX_IndexParamVideoProfileLevelQuerySupported, 7590 ¶m, sizeof(param)); 7591 if (err != OK) { 7592 break; 7593 } 7594 builder->addProfileLevel(param.eProfile, param.eLevel); 7595 7596 if (index == kMaxIndicesToCheck) { 7597 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 7598 name.c_str(), index, 7599 param.eProfile, param.eLevel); 7600 } 7601 } 7602 7603 // Color format query 7604 // return colors in the order reported by the OMX component 7605 // prefix "flexible" standard ones with the flexible equivalent 7606 OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat; 7607 InitOMXParams(&portFormat); 7608 portFormat.nPortIndex = isEncoder ? 
kPortIndexInput : kPortIndexOutput; 7609 Vector<uint32_t> supportedColors; // shadow copy to check for duplicates 7610 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7611 portFormat.nIndex = index; 7612 status_t err = omx->getParameter( 7613 node, OMX_IndexParamVideoPortFormat, 7614 &portFormat, sizeof(portFormat)); 7615 if (err != OK) { 7616 break; 7617 } 7618 7619 OMX_U32 flexibleEquivalent; 7620 if (IsFlexibleColorFormat( 7621 omx, node, portFormat.eColorFormat, false /* usingNativeWindow */, 7622 &flexibleEquivalent)) { 7623 bool marked = false; 7624 for (size_t i = 0; i < supportedColors.size(); ++i) { 7625 if (supportedColors[i] == flexibleEquivalent) { 7626 marked = true; 7627 break; 7628 } 7629 } 7630 if (!marked) { 7631 supportedColors.push(flexibleEquivalent); 7632 builder->addColorFormat(flexibleEquivalent); 7633 } 7634 } 7635 supportedColors.push(portFormat.eColorFormat); 7636 builder->addColorFormat(portFormat.eColorFormat); 7637 7638 if (index == kMaxIndicesToCheck) { 7639 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 7640 name.c_str(), index, 7641 asString(portFormat.eColorFormat), portFormat.eColorFormat); 7642 } 7643 } 7644 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) { 7645 // More audio codecs if they have profiles. 7646 OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param; 7647 InitOMXParams(¶m); 7648 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7649 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7650 param.nProfileIndex = index; 7651 status_t err = omx->getParameter( 7652 node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported, 7653 ¶m, sizeof(param)); 7654 if (err != OK) { 7655 break; 7656 } 7657 // For audio, level is ignored. 
7658 builder->addProfileLevel(param.eProfile, 0 /* level */); 7659 7660 if (index == kMaxIndicesToCheck) { 7661 ALOGW("[%s] stopping checking profiles after %u: %x", 7662 name.c_str(), index, 7663 param.eProfile); 7664 } 7665 } 7666 7667 // NOTE: Without Android extensions, OMX does not provide a way to query 7668 // AAC profile support 7669 if (param.nProfileIndex == 0) { 7670 ALOGW("component %s doesn't support profile query.", name.c_str()); 7671 } 7672 } 7673 7674 if (isVideo && !isEncoder) { 7675 native_handle_t *sidebandHandle = NULL; 7676 if (omx->configureVideoTunnelMode( 7677 node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) { 7678 // tunneled playback includes adaptive playback 7679 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback 7680 | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback); 7681 } else if (omx->storeMetaDataInBuffers( 7682 node, kPortIndexOutput, OMX_TRUE) == OK || 7683 omx->prepareForAdaptivePlayback( 7684 node, kPortIndexOutput, OMX_TRUE, 7685 1280 /* width */, 720 /* height */) == OK) { 7686 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback); 7687 } 7688 } 7689 7690 if (isVideo && isEncoder) { 7691 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 7692 InitOMXParams(¶ms); 7693 params.nPortIndex = kPortIndexOutput; 7694 // TODO: should we verify if fallback is supported? 7695 if (omx->getConfig( 7696 node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, 7697 ¶ms, sizeof(params)) == OK) { 7698 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh); 7699 } 7700 } 7701 7702 *caps = builder; 7703 omx->freeNode(node); 7704 client.disconnect(); 7705 return OK; 7706} 7707 7708// These are supposed be equivalent to the logic in 7709// "audio_channel_out_mask_from_count". 
7710//static 7711status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7712 switch (numChannels) { 7713 case 1: 7714 map[0] = OMX_AUDIO_ChannelCF; 7715 break; 7716 case 2: 7717 map[0] = OMX_AUDIO_ChannelLF; 7718 map[1] = OMX_AUDIO_ChannelRF; 7719 break; 7720 case 3: 7721 map[0] = OMX_AUDIO_ChannelLF; 7722 map[1] = OMX_AUDIO_ChannelRF; 7723 map[2] = OMX_AUDIO_ChannelCF; 7724 break; 7725 case 4: 7726 map[0] = OMX_AUDIO_ChannelLF; 7727 map[1] = OMX_AUDIO_ChannelRF; 7728 map[2] = OMX_AUDIO_ChannelLR; 7729 map[3] = OMX_AUDIO_ChannelRR; 7730 break; 7731 case 5: 7732 map[0] = OMX_AUDIO_ChannelLF; 7733 map[1] = OMX_AUDIO_ChannelRF; 7734 map[2] = OMX_AUDIO_ChannelCF; 7735 map[3] = OMX_AUDIO_ChannelLR; 7736 map[4] = OMX_AUDIO_ChannelRR; 7737 break; 7738 case 6: 7739 map[0] = OMX_AUDIO_ChannelLF; 7740 map[1] = OMX_AUDIO_ChannelRF; 7741 map[2] = OMX_AUDIO_ChannelCF; 7742 map[3] = OMX_AUDIO_ChannelLFE; 7743 map[4] = OMX_AUDIO_ChannelLR; 7744 map[5] = OMX_AUDIO_ChannelRR; 7745 break; 7746 case 7: 7747 map[0] = OMX_AUDIO_ChannelLF; 7748 map[1] = OMX_AUDIO_ChannelRF; 7749 map[2] = OMX_AUDIO_ChannelCF; 7750 map[3] = OMX_AUDIO_ChannelLFE; 7751 map[4] = OMX_AUDIO_ChannelLR; 7752 map[5] = OMX_AUDIO_ChannelRR; 7753 map[6] = OMX_AUDIO_ChannelCS; 7754 break; 7755 case 8: 7756 map[0] = OMX_AUDIO_ChannelLF; 7757 map[1] = OMX_AUDIO_ChannelRF; 7758 map[2] = OMX_AUDIO_ChannelCF; 7759 map[3] = OMX_AUDIO_ChannelLFE; 7760 map[4] = OMX_AUDIO_ChannelLR; 7761 map[5] = OMX_AUDIO_ChannelRR; 7762 map[6] = OMX_AUDIO_ChannelLS; 7763 map[7] = OMX_AUDIO_ChannelRS; 7764 break; 7765 default: 7766 return -EINVAL; 7767 } 7768 7769 return OK; 7770} 7771 7772} // namespace android 7773