// ACodec.cpp, revision 697aedf61d6180e72102068a969383018eedfa1e
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "omx/OMXUtils.h" 56 57namespace android { 58 59enum { 60 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 61}; 62 63// OMX errors are directly mapped into status_t range if 64// there is no 
corresponding MediaError status code. 65// Use the statusFromOMXError(int32_t omxError) function. 66// 67// Currently this is a direct map. 68// See frameworks/native/include/media/openmax/OMX_Core.h 69// 70// Vendor OMX errors from 0x90000000 - 0x9000FFFF 71// Extension OMX errors from 0x8F000000 - 0x90000000 72// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 73// 74 75// returns true if err is a recognized OMX error code. 76// as OMX error is OMX_S32, this is an int32_t type 77static inline bool isOMXError(int32_t err) { 78 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 79} 80 81// converts an OMX error to a status_t 82static inline status_t statusFromOMXError(int32_t omxError) { 83 switch (omxError) { 84 case OMX_ErrorInvalidComponentName: 85 case OMX_ErrorComponentNotFound: 86 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 87 default: 88 return isOMXError(omxError) ? omxError : 0; // no translation required 89 } 90} 91 92// checks and converts status_t to a non-side-effect status_t 93static inline status_t makeNoSideEffectStatus(status_t err) { 94 switch (err) { 95 // the following errors have side effects and may come 96 // from other code modules. Remap for safety reasons. 
97 case INVALID_OPERATION: 98 case DEAD_OBJECT: 99 return UNKNOWN_ERROR; 100 default: 101 return err; 102 } 103} 104 105struct MessageList : public RefBase { 106 MessageList() { 107 } 108 virtual ~MessageList() { 109 } 110 std::list<sp<AMessage> > &getList() { return mList; } 111private: 112 std::list<sp<AMessage> > mList; 113 114 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 115}; 116 117struct CodecObserver : public BnOMXObserver { 118 CodecObserver() {} 119 120 void setNotificationMessage(const sp<AMessage> &msg) { 121 mNotify = msg; 122 } 123 124 // from IOMXObserver 125 virtual void onMessages(const std::list<omx_message> &messages) { 126 if (messages.empty()) { 127 return; 128 } 129 130 sp<AMessage> notify = mNotify->dup(); 131 bool first = true; 132 sp<MessageList> msgList = new MessageList(); 133 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 134 it != messages.cend(); ++it) { 135 const omx_message &omx_msg = *it; 136 if (first) { 137 notify->setInt32("node", omx_msg.node); 138 first = false; 139 } 140 141 sp<AMessage> msg = new AMessage; 142 msg->setInt32("type", omx_msg.type); 143 switch (omx_msg.type) { 144 case omx_message::EVENT: 145 { 146 msg->setInt32("event", omx_msg.u.event_data.event); 147 msg->setInt32("data1", omx_msg.u.event_data.data1); 148 msg->setInt32("data2", omx_msg.u.event_data.data2); 149 break; 150 } 151 152 case omx_message::EMPTY_BUFFER_DONE: 153 { 154 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 155 msg->setInt32("fence_fd", omx_msg.fenceFd); 156 break; 157 } 158 159 case omx_message::FILL_BUFFER_DONE: 160 { 161 msg->setInt32( 162 "buffer", omx_msg.u.extended_buffer_data.buffer); 163 msg->setInt32( 164 "range_offset", 165 omx_msg.u.extended_buffer_data.range_offset); 166 msg->setInt32( 167 "range_length", 168 omx_msg.u.extended_buffer_data.range_length); 169 msg->setInt32( 170 "flags", 171 omx_msg.u.extended_buffer_data.flags); 172 msg->setInt64( 173 "timestamp", 174 
omx_msg.u.extended_buffer_data.timestamp); 175 msg->setInt32( 176 "fence_fd", omx_msg.fenceFd); 177 break; 178 } 179 180 case omx_message::FRAME_RENDERED: 181 { 182 msg->setInt64( 183 "media_time_us", omx_msg.u.render_data.timestamp); 184 msg->setInt64( 185 "system_nano", omx_msg.u.render_data.nanoTime); 186 break; 187 } 188 189 default: 190 ALOGE("Unrecognized message type: %d", omx_msg.type); 191 break; 192 } 193 msgList->getList().push_back(msg); 194 } 195 notify->setObject("messages", msgList); 196 notify->post(); 197 } 198 199protected: 200 virtual ~CodecObserver() {} 201 202private: 203 sp<AMessage> mNotify; 204 205 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); 206}; 207 208//////////////////////////////////////////////////////////////////////////////// 209 210struct ACodec::BaseState : public AState { 211 BaseState(ACodec *codec, const sp<AState> &parentState = NULL); 212 213protected: 214 enum PortMode { 215 KEEP_BUFFERS, 216 RESUBMIT_BUFFERS, 217 FREE_BUFFERS, 218 }; 219 220 ACodec *mCodec; 221 222 virtual PortMode getPortMode(OMX_U32 portIndex); 223 224 virtual bool onMessageReceived(const sp<AMessage> &msg); 225 226 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 227 228 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 229 virtual void onInputBufferFilled(const sp<AMessage> &msg); 230 231 void postFillThisBuffer(BufferInfo *info); 232 233private: 234 // Handles an OMX message. Returns true iff message was handled. 235 bool onOMXMessage(const sp<AMessage> &msg); 236 237 // Handles a list of messages. Returns true iff messages were handled. 
238 bool onOMXMessageList(const sp<AMessage> &msg); 239 240 // returns true iff this message is for this component and the component is alive 241 bool checkOMXMessage(const sp<AMessage> &msg); 242 243 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd); 244 245 bool onOMXFillBufferDone( 246 IOMX::buffer_id bufferID, 247 size_t rangeOffset, size_t rangeLength, 248 OMX_U32 flags, 249 int64_t timeUs, 250 int fenceFd); 251 252 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 253 254 void getMoreInputDataIfPossible(); 255 256 DISALLOW_EVIL_CONSTRUCTORS(BaseState); 257}; 258 259//////////////////////////////////////////////////////////////////////////////// 260 261struct ACodec::DeathNotifier : public IBinder::DeathRecipient { 262 DeathNotifier(const sp<AMessage> ¬ify) 263 : mNotify(notify) { 264 } 265 266 virtual void binderDied(const wp<IBinder> &) { 267 mNotify->post(); 268 } 269 270protected: 271 virtual ~DeathNotifier() {} 272 273private: 274 sp<AMessage> mNotify; 275 276 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); 277}; 278 279struct ACodec::UninitializedState : public ACodec::BaseState { 280 UninitializedState(ACodec *codec); 281 282protected: 283 virtual bool onMessageReceived(const sp<AMessage> &msg); 284 virtual void stateEntered(); 285 286private: 287 void onSetup(const sp<AMessage> &msg); 288 bool onAllocateComponent(const sp<AMessage> &msg); 289 290 sp<DeathNotifier> mDeathNotifier; 291 292 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); 293}; 294 295//////////////////////////////////////////////////////////////////////////////// 296 297struct ACodec::LoadedState : public ACodec::BaseState { 298 LoadedState(ACodec *codec); 299 300protected: 301 virtual bool onMessageReceived(const sp<AMessage> &msg); 302 virtual void stateEntered(); 303 304private: 305 friend struct ACodec::UninitializedState; 306 307 bool onConfigureComponent(const sp<AMessage> &msg); 308 void onCreateInputSurface(const sp<AMessage> &msg); 309 void 
onSetInputSurface(const sp<AMessage> &msg); 310 void onStart(); 311 void onShutdown(bool keepComponentAllocated); 312 313 status_t setupInputSurface(); 314 315 DISALLOW_EVIL_CONSTRUCTORS(LoadedState); 316}; 317 318//////////////////////////////////////////////////////////////////////////////// 319 320struct ACodec::LoadedToIdleState : public ACodec::BaseState { 321 LoadedToIdleState(ACodec *codec); 322 323protected: 324 virtual bool onMessageReceived(const sp<AMessage> &msg); 325 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 326 virtual void stateEntered(); 327 328private: 329 status_t allocateBuffers(); 330 331 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); 332}; 333 334//////////////////////////////////////////////////////////////////////////////// 335 336struct ACodec::IdleToExecutingState : public ACodec::BaseState { 337 IdleToExecutingState(ACodec *codec); 338 339protected: 340 virtual bool onMessageReceived(const sp<AMessage> &msg); 341 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 342 virtual void stateEntered(); 343 344private: 345 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); 346}; 347 348//////////////////////////////////////////////////////////////////////////////// 349 350struct ACodec::ExecutingState : public ACodec::BaseState { 351 ExecutingState(ACodec *codec); 352 353 void submitRegularOutputBuffers(); 354 void submitOutputMetaBuffers(); 355 void submitOutputBuffers(); 356 357 // Submit output buffers to the decoder, submit input buffers to client 358 // to fill with data. 359 void resume(); 360 361 // Returns true iff input and output buffers are in play. 
362 bool active() const { return mActive; } 363 364protected: 365 virtual PortMode getPortMode(OMX_U32 portIndex); 366 virtual bool onMessageReceived(const sp<AMessage> &msg); 367 virtual void stateEntered(); 368 369 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 370 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 371 372private: 373 bool mActive; 374 375 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); 376}; 377 378//////////////////////////////////////////////////////////////////////////////// 379 380struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { 381 OutputPortSettingsChangedState(ACodec *codec); 382 383protected: 384 virtual PortMode getPortMode(OMX_U32 portIndex); 385 virtual bool onMessageReceived(const sp<AMessage> &msg); 386 virtual void stateEntered(); 387 388 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 389 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 390 391private: 392 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); 393}; 394 395//////////////////////////////////////////////////////////////////////////////// 396 397struct ACodec::ExecutingToIdleState : public ACodec::BaseState { 398 ExecutingToIdleState(ACodec *codec); 399 400protected: 401 virtual bool onMessageReceived(const sp<AMessage> &msg); 402 virtual void stateEntered(); 403 404 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 405 406 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 407 virtual void onInputBufferFilled(const sp<AMessage> &msg); 408 409private: 410 void changeStateIfWeOwnAllBuffers(); 411 412 bool mComponentNowIdle; 413 414 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); 415}; 416 417//////////////////////////////////////////////////////////////////////////////// 418 419struct ACodec::IdleToLoadedState : public ACodec::BaseState { 420 IdleToLoadedState(ACodec *codec); 421 422protected: 423 
virtual bool onMessageReceived(const sp<AMessage> &msg); 424 virtual void stateEntered(); 425 426 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 427 428private: 429 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); 430}; 431 432//////////////////////////////////////////////////////////////////////////////// 433 434struct ACodec::FlushingState : public ACodec::BaseState { 435 FlushingState(ACodec *codec); 436 437protected: 438 virtual bool onMessageReceived(const sp<AMessage> &msg); 439 virtual void stateEntered(); 440 441 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 442 443 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 444 virtual void onInputBufferFilled(const sp<AMessage> &msg); 445 446private: 447 bool mFlushComplete[2]; 448 449 void changeStateIfWeOwnAllBuffers(); 450 451 DISALLOW_EVIL_CONSTRUCTORS(FlushingState); 452}; 453 454//////////////////////////////////////////////////////////////////////////////// 455 456void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) { 457 if (mFenceFd >= 0) { 458 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", 459 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 460 } 461 mFenceFd = fenceFd; 462 mIsReadFence = false; 463} 464 465void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { 466 if (mFenceFd >= 0) { 467 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", 468 mIsReadFence ? 
"read" : "write", mFenceFd, fenceFd, dbg); 469 } 470 mFenceFd = fenceFd; 471 mIsReadFence = true; 472} 473 474void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 475 if (mFenceFd >= 0 && mIsReadFence) { 476 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 477 } 478} 479 480void ACodec::BufferInfo::checkReadFence(const char *dbg) { 481 if (mFenceFd >= 0 && !mIsReadFence) { 482 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 483 } 484} 485 486//////////////////////////////////////////////////////////////////////////////// 487 488ACodec::ACodec() 489 : mQuirks(0), 490 mNode(0), 491 mUsingNativeWindow(false), 492 mNativeWindowUsageBits(0), 493 mIsVideo(false), 494 mIsEncoder(false), 495 mFatalError(false), 496 mShutdownInProgress(false), 497 mExplicitShutdown(false), 498 mEncoderDelay(0), 499 mEncoderPadding(0), 500 mRotationDegrees(0), 501 mChannelMaskPresent(false), 502 mChannelMask(0), 503 mDequeueCounter(0), 504 mInputMetadataType(kMetadataBufferTypeInvalid), 505 mOutputMetadataType(kMetadataBufferTypeInvalid), 506 mLegacyAdaptiveExperiment(false), 507 mMetadataBuffersToSubmit(0), 508 mRepeatFrameDelayUs(-1ll), 509 mMaxPtsGapUs(-1ll), 510 mMaxFps(-1), 511 mTimePerFrameUs(-1ll), 512 mTimePerCaptureUs(-1ll), 513 mCreateInputBuffersSuspended(false), 514 mTunneled(false), 515 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0) { 516 mUninitializedState = new UninitializedState(this); 517 mLoadedState = new LoadedState(this); 518 mLoadedToIdleState = new LoadedToIdleState(this); 519 mIdleToExecutingState = new IdleToExecutingState(this); 520 mExecutingState = new ExecutingState(this); 521 522 mOutputPortSettingsChangedState = 523 new OutputPortSettingsChangedState(this); 524 525 mExecutingToIdleState = new ExecutingToIdleState(this); 526 mIdleToLoadedState = new IdleToLoadedState(this); 527 mFlushingState = new FlushingState(this); 528 529 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 530 mInputEOSResult = OK; 
531 532 changeState(mUninitializedState); 533} 534 535ACodec::~ACodec() { 536} 537 538void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 539 mNotify = msg; 540} 541 542void ACodec::initiateSetup(const sp<AMessage> &msg) { 543 msg->setWhat(kWhatSetup); 544 msg->setTarget(this); 545 msg->post(); 546} 547 548void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 549 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 550 msg->setMessage("params", params); 551 msg->post(); 552} 553 554void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 555 msg->setWhat(kWhatAllocateComponent); 556 msg->setTarget(this); 557 msg->post(); 558} 559 560void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 561 msg->setWhat(kWhatConfigureComponent); 562 msg->setTarget(this); 563 msg->post(); 564} 565 566status_t ACodec::setSurface(const sp<Surface> &surface) { 567 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 568 msg->setObject("surface", surface); 569 570 sp<AMessage> response; 571 status_t err = msg->postAndAwaitResponse(&response); 572 573 if (err == OK) { 574 (void)response->findInt32("err", &err); 575 } 576 return err; 577} 578 579void ACodec::initiateCreateInputSurface() { 580 (new AMessage(kWhatCreateInputSurface, this))->post(); 581} 582 583void ACodec::initiateSetInputSurface( 584 const sp<PersistentSurface> &surface) { 585 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 586 msg->setObject("input-surface", surface); 587 msg->post(); 588} 589 590void ACodec::signalEndOfInputStream() { 591 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 592} 593 594void ACodec::initiateStart() { 595 (new AMessage(kWhatStart, this))->post(); 596} 597 598void ACodec::signalFlush() { 599 ALOGV("[%s] signalFlush", mComponentName.c_str()); 600 (new AMessage(kWhatFlush, this))->post(); 601} 602 603void ACodec::signalResume() { 604 (new AMessage(kWhatResume, this))->post(); 605} 606 607void ACodec::initiateShutdown(bool 
keepComponentAllocated) { 608 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 609 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 610 msg->post(); 611 if (!keepComponentAllocated) { 612 // ensure shutdown completes in 3 seconds 613 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 614 } 615} 616 617void ACodec::signalRequestIDRFrame() { 618 (new AMessage(kWhatRequestIDRFrame, this))->post(); 619} 620 621// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 622// Some codecs may return input buffers before having them processed. 623// This causes a halt if we already signaled an EOS on the input 624// port. For now keep submitting an output buffer if there was an 625// EOS on the input port, but not yet on the output port. 626void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 627 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 628 mMetadataBuffersToSubmit > 0) { 629 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 630 } 631} 632 633status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 634 // allow keeping unset surface 635 if (surface == NULL) { 636 if (mNativeWindow != NULL) { 637 ALOGW("cannot unset a surface"); 638 return INVALID_OPERATION; 639 } 640 return OK; 641 } 642 643 // cannot switch from bytebuffers to surface 644 if (mNativeWindow == NULL) { 645 ALOGW("component was not configured with a surface"); 646 return INVALID_OPERATION; 647 } 648 649 ANativeWindow *nativeWindow = surface.get(); 650 // if we have not yet started the codec, we can simply set the native window 651 if (mBuffers[kPortIndexInput].size() == 0) { 652 mNativeWindow = surface; 653 return OK; 654 } 655 656 // we do not support changing a tunneled surface after start 657 if (mTunneled) { 658 ALOGW("cannot change tunneled surface"); 659 return INVALID_OPERATION; 660 } 661 662 int usageBits = 0; 663 status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow, &usageBits); 664 if (err != 
OK) { 665 return err; 666 } 667 668 int ignoredFlags = kVideoGrallocUsage; 669 // New output surface is not allowed to add new usage flag except ignored ones. 670 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 671 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 672 return BAD_VALUE; 673 } 674 675 // get min undequeued count. We cannot switch to a surface that has a higher 676 // undequeued count than we allocated. 677 int minUndequeuedBuffers = 0; 678 err = nativeWindow->query( 679 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 680 &minUndequeuedBuffers); 681 if (err != 0) { 682 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 683 strerror(-err), -err); 684 return err; 685 } 686 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 687 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 688 minUndequeuedBuffers, mNumUndequeuedBuffers); 689 return BAD_VALUE; 690 } 691 692 // we cannot change the number of output buffers while OMX is running 693 // set up surface to the same count 694 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 695 ALOGV("setting up surface for %zu buffers", buffers.size()); 696 697 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 698 if (err != 0) { 699 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 700 -err); 701 return err; 702 } 703 704 // need to enable allocation when attaching 705 surface->getIGraphicBufferProducer()->allowAllocation(true); 706 707 // for meta data mode, we move dequeud buffers to the new surface. 
708 // for non-meta mode, we must move all registered buffers 709 for (size_t i = 0; i < buffers.size(); ++i) { 710 const BufferInfo &info = buffers[i]; 711 // skip undequeued buffers for meta data mode 712 if (storingMetadataInDecodedBuffers() 713 && !mLegacyAdaptiveExperiment 714 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 715 ALOGV("skipping buffer %p", info.mGraphicBuffer->getNativeBuffer()); 716 continue; 717 } 718 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 719 720 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 721 if (err != OK) { 722 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 723 info.mGraphicBuffer->getNativeBuffer(), 724 strerror(-err), -err); 725 return err; 726 } 727 } 728 729 // cancel undequeued buffers to new surface 730 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 731 for (size_t i = 0; i < buffers.size(); ++i) { 732 BufferInfo &info = buffers.editItemAt(i); 733 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 734 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 735 err = nativeWindow->cancelBuffer( 736 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 737 info.mFenceFd = -1; 738 if (err != OK) { 739 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 740 info.mGraphicBuffer->getNativeBuffer(), 741 strerror(-err), -err); 742 return err; 743 } 744 } 745 } 746 // disallow further allocation 747 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 748 } 749 750 // push blank buffers to previous window if requested 751 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 752 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 753 } 754 755 mNativeWindow = nativeWindow; 756 mNativeWindowUsageBits = usageBits; 757 return OK; 758} 759 760status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 761 CHECK(portIndex == kPortIndexInput || portIndex == 
kPortIndexOutput); 762 763 CHECK(mDealer[portIndex] == NULL); 764 CHECK(mBuffers[portIndex].isEmpty()); 765 766 status_t err; 767 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { 768 if (storingMetadataInDecodedBuffers()) { 769 err = allocateOutputMetadataBuffers(); 770 } else { 771 err = allocateOutputBuffersFromNativeWindow(); 772 } 773 } else { 774 OMX_PARAM_PORTDEFINITIONTYPE def; 775 InitOMXParams(&def); 776 def.nPortIndex = portIndex; 777 778 err = mOMX->getParameter( 779 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 780 781 if (err == OK) { 782 MetadataBufferType type = 783 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 784 int32_t bufSize = def.nBufferSize; 785 if (type == kMetadataBufferTypeGrallocSource) { 786 bufSize = sizeof(VideoGrallocMetadata); 787 } else if (type == kMetadataBufferTypeANWBuffer) { 788 bufSize = sizeof(VideoNativeMetadata); 789 } 790 791 // If using gralloc or native source input metadata buffers, allocate largest 792 // metadata size as we prefer to generate native source metadata, but component 793 // may require gralloc source. For camera source, allocate at least enough 794 // size for native metadata buffers. 795 int32_t allottedSize = bufSize; 796 if (portIndex == kPortIndexInput && type >= kMetadataBufferTypeGrallocSource) { 797 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 798 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 799 bufSize = max(bufSize, (int32_t)sizeof(VideoNativeMetadata)); 800 } 801 802 size_t alignment = MemoryDealer::getAllocationAlignment(); 803 804 ALOGV("[%s] Allocating %u buffers of size %d/%d (from %u using %s) on %s port", 805 mComponentName.c_str(), 806 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 807 portIndex == kPortIndexInput ? 
"input" : "output"); 808 809 if (bufSize == 0 || bufSize > kMaxCodecBufferSize) { 810 ALOGE("b/22885421"); 811 return NO_MEMORY; 812 } 813 814 // don't modify bufSize as OMX may not expect it to increase after negotiation 815 size_t alignedSize = align(bufSize, alignment); 816 if (def.nBufferCountActual > SIZE_MAX / alignedSize) { 817 ALOGE("b/22885421"); 818 return NO_MEMORY; 819 } 820 821 size_t totalSize = def.nBufferCountActual * alignedSize; 822 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 823 824 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 825 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 826 if (mem == NULL || mem->pointer() == NULL) { 827 return NO_MEMORY; 828 } 829 830 BufferInfo info; 831 info.mStatus = BufferInfo::OWNED_BY_US; 832 info.mFenceFd = -1; 833 info.mRenderInfo = NULL; 834 info.mNativeHandle = NULL; 835 836 uint32_t requiresAllocateBufferBit = 837 (portIndex == kPortIndexInput) 838 ? kRequiresAllocateBufferOnInputPorts 839 : kRequiresAllocateBufferOnOutputPorts; 840 841 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 842 mem.clear(); 843 844 void *ptr = NULL; 845 native_handle_t *native_handle = NULL; 846 err = mOMX->allocateSecureBuffer( 847 mNode, portIndex, bufSize, &info.mBufferID, 848 &ptr, &native_handle); 849 850 // TRICKY: this representation is unorthodox, but ACodec requires 851 // an ABuffer with a proper size to validate range offsets and lengths. 852 // Since mData is never referenced for secure input, it is used to store 853 // either the pointer to the secure buffer, or the opaque handle as on 854 // some devices ptr is actually an opaque handle, not a pointer. 855 856 // TRICKY2: use native handle as the base of the ABuffer if received one, 857 // because Widevine source only receives these base addresses. 858 info.mData = new ABuffer(ptr != NULL ? 
ptr : (void *)native_handle, bufSize); 859 info.mNativeHandle = NativeHandle::create(native_handle, true /* ownsHandle */); 860 } else if (mQuirks & requiresAllocateBufferBit) { 861 err = mOMX->allocateBufferWithBackup( 862 mNode, portIndex, mem, &info.mBufferID, allottedSize); 863 } else { 864 err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize); 865 } 866 867 if (mem != NULL) { 868 info.mData = new ABuffer(mem->pointer(), bufSize); 869 if (type == kMetadataBufferTypeANWBuffer) { 870 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 871 } 872 info.mMemRef = mem; 873 } 874 875 mBuffers[portIndex].push(info); 876 } 877 } 878 } 879 880 if (err != OK) { 881 return err; 882 } 883 884 sp<AMessage> notify = mNotify->dup(); 885 notify->setInt32("what", CodecBase::kWhatBuffersAllocated); 886 887 notify->setInt32("portIndex", portIndex); 888 889 sp<PortDescription> desc = new PortDescription; 890 891 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 892 const BufferInfo &info = mBuffers[portIndex][i]; 893 desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef); 894 } 895 896 notify->setObject("portDesc", desc); 897 notify->post(); 898 899 return OK; 900} 901 902status_t ACodec::setupNativeWindowSizeFormatAndUsage( 903 ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */) { 904 OMX_PARAM_PORTDEFINITIONTYPE def; 905 InitOMXParams(&def); 906 def.nPortIndex = kPortIndexOutput; 907 908 status_t err = mOMX->getParameter( 909 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 910 911 if (err != OK) { 912 return err; 913 } 914 915 OMX_U32 usage = 0; 916 err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage); 917 if (err != 0) { 918 ALOGW("querying usage flags from OMX IL component failed: %d", err); 919 // XXX: Currently this error is logged, but not fatal. 
920 usage = 0; 921 } 922 int omxUsage = usage; 923 924 if (mFlags & kFlagIsGrallocUsageProtected) { 925 usage |= GRALLOC_USAGE_PROTECTED; 926 } 927 928 usage |= kVideoGrallocUsage; 929 *finalUsage = usage; 930 931 ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage); 932 return setNativeWindowSizeFormatAndUsage( 933 nativeWindow, 934 def.format.video.nFrameWidth, 935 def.format.video.nFrameHeight, 936 def.format.video.eColorFormat, 937 mRotationDegrees, 938 usage); 939} 940 941status_t ACodec::configureOutputBuffersFromNativeWindow( 942 OMX_U32 *bufferCount, OMX_U32 *bufferSize, 943 OMX_U32 *minUndequeuedBuffers) { 944 OMX_PARAM_PORTDEFINITIONTYPE def; 945 InitOMXParams(&def); 946 def.nPortIndex = kPortIndexOutput; 947 948 status_t err = mOMX->getParameter( 949 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 950 951 if (err == OK) { 952 err = setupNativeWindowSizeFormatAndUsage(mNativeWindow.get(), &mNativeWindowUsageBits); 953 } 954 if (err != OK) { 955 mNativeWindowUsageBits = 0; 956 return err; 957 } 958 959 // Exits here for tunneled video playback codecs -- i.e. skips native window 960 // buffer allocation step as this is managed by the tunneled OMX omponent 961 // itself and explicitly sets def.nBufferCountActual to 0. 
962 if (mTunneled) { 963 ALOGV("Tunneled Playback: skipping native window buffer allocation."); 964 def.nBufferCountActual = 0; 965 err = mOMX->setParameter( 966 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 967 968 *minUndequeuedBuffers = 0; 969 *bufferCount = 0; 970 *bufferSize = 0; 971 return err; 972 } 973 974 *minUndequeuedBuffers = 0; 975 err = mNativeWindow->query( 976 mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 977 (int *)minUndequeuedBuffers); 978 979 if (err != 0) { 980 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 981 strerror(-err), -err); 982 return err; 983 } 984 985 // FIXME: assume that surface is controlled by app (native window 986 // returns the number for the case when surface is not controlled by app) 987 // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported 988 // For now, try to allocate 1 more buffer, but don't fail if unsuccessful 989 990 // Use conservative allocation while also trying to reduce starvation 991 // 992 // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the 993 // minimum needed for the consumer to be able to work 994 // 2. 
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    // Allocates the negotiated number of output buffers by dequeueing them
    // from the native window and registering each with the OMX component via
    // useGraphicBuffer(). On success, the minimum undequeued buffers are
    // returned (cancelled) to the window; on failure, everything we dequeued
    // is cancelled.
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    // In non-metadata mode the actual graphic buffers are allocated here, so
    // temporarily let the producer allocate.
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mGraphicBuffer = graphicBuffer;
        // Push before registering with OMX so the cleanup loop below also
        // cancels a buffer whose registration fails.
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // keep the first error encountered
            if (err == 0) {
                err = error;
            }
        }
    }

    // Re-disable allocation now that the fixed set of buffers exists.
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}
1128 sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata); 1129 size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment()); 1130 mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec"); 1131 1132 // Dequeue buffers and send them to OMX 1133 for (OMX_U32 i = 0; i < bufferCount; i++) { 1134 BufferInfo info; 1135 info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1136 info.mFenceFd = -1; 1137 info.mRenderInfo = NULL; 1138 info.mGraphicBuffer = NULL; 1139 info.mDequeuedAt = mDequeueCounter; 1140 1141 sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize); 1142 if (mem == NULL || mem->pointer() == NULL) { 1143 return NO_MEMORY; 1144 } 1145 if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1146 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 1147 } 1148 info.mData = new ABuffer(mem->pointer(), mem->size()); 1149 1150 // we use useBuffer for metadata regardless of quirks 1151 err = mOMX->useBuffer( 1152 mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size()); 1153 info.mMemRef = mem; 1154 mBuffers[kPortIndexOutput].push(info); 1155 1156 ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)", 1157 mComponentName.c_str(), info.mBufferID, mem->pointer()); 1158 } 1159 1160 if (mLegacyAdaptiveExperiment) { 1161 // preallocate and preregister buffers 1162 static_cast<Surface *>(mNativeWindow.get()) 1163 ->getIGraphicBufferProducer()->allowAllocation(true); 1164 1165 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1166 "output port", 1167 mComponentName.c_str(), bufferCount, bufferSize); 1168 1169 // Dequeue buffers then cancel them all 1170 for (OMX_U32 i = 0; i < bufferCount; i++) { 1171 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1172 1173 ANativeWindowBuffer *buf; 1174 int fenceFd; 1175 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1176 if (err != 0) { 1177 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), 
-err); 1178 break; 1179 } 1180 1181 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1182 mOMX->updateGraphicBufferInMeta( 1183 mNode, kPortIndexOutput, graphicBuffer, info->mBufferID); 1184 info->mStatus = BufferInfo::OWNED_BY_US; 1185 info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy"); 1186 info->mGraphicBuffer = graphicBuffer; 1187 } 1188 1189 for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) { 1190 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1191 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1192 status_t error = cancelBufferToNativeWindow(info); 1193 if (err == OK) { 1194 err = error; 1195 } 1196 } 1197 } 1198 1199 static_cast<Surface*>(mNativeWindow.get()) 1200 ->getIGraphicBufferProducer()->allowAllocation(false); 1201 } 1202 1203 mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers; 1204 return err; 1205} 1206 1207status_t ACodec::submitOutputMetadataBuffer() { 1208 CHECK(storingMetadataInDecodedBuffers()); 1209 if (mMetadataBuffersToSubmit == 0) 1210 return OK; 1211 1212 BufferInfo *info = dequeueBufferFromNativeWindow(); 1213 if (info == NULL) { 1214 return ERROR_IO; 1215 } 1216 1217 ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p", 1218 mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get()); 1219 1220 --mMetadataBuffersToSubmit; 1221 info->checkWriteFence("submitOutputMetadataBuffer"); 1222 status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd); 1223 info->mFenceFd = -1; 1224 if (err == OK) { 1225 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 1226 } 1227 1228 return err; 1229} 1230 1231status_t ACodec::waitForFence(int fd, const char *dbg ) { 1232 status_t res = OK; 1233 if (fd >= 0) { 1234 sp<Fence> fence = new Fence(fd); 1235 res = fence->wait(IOMX::kFenceTimeoutMs); 1236 ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg); 1237 } 1238 return res; 1239} 1240 1241// static 1242const char 
*ACodec::_asString(BufferInfo::Status s) { 1243 switch (s) { 1244 case BufferInfo::OWNED_BY_US: return "OUR"; 1245 case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT"; 1246 case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM"; 1247 case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM"; 1248 case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE"; 1249 case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED"; 1250 default: return "?"; 1251 } 1252} 1253 1254void ACodec::dumpBuffers(OMX_U32 portIndex) { 1255 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 1256 ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(), 1257 portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size()); 1258 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1259 const BufferInfo &info = mBuffers[portIndex][i]; 1260 ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u", 1261 i, info.mBufferID, info.mGraphicBuffer.get(), 1262 info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(), 1263 _asString(info.mStatus), info.mStatus, info.mDequeuedAt); 1264 } 1265} 1266 1267status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { 1268 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 1269 1270 ALOGV("[%s] Calling cancelBuffer on buffer %u", 1271 mComponentName.c_str(), info->mBufferID); 1272 1273 info->checkWriteFence("cancelBufferToNativeWindow"); 1274 int err = mNativeWindow->cancelBuffer( 1275 mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 1276 info->mFenceFd = -1; 1277 1278 ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window", 1279 mComponentName.c_str(), info->mBufferID); 1280 // change ownership even if cancelBuffer fails 1281 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1282 1283 return err; 1284} 1285 1286void ACodec::updateRenderInfoForDequeuedBuffer( 1287 ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) { 1288 1289 info->mRenderInfo = 1290 
mRenderTracker.updateInfoForDequeuedBuffer( 1291 buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]); 1292 1293 // check for any fences already signaled 1294 notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo); 1295} 1296 1297void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 1298 if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) { 1299 mRenderTracker.dumpRenderQueue(); 1300 } 1301} 1302 1303void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) { 1304 sp<AMessage> msg = mNotify->dup(); 1305 msg->setInt32("what", CodecBase::kWhatOutputFramesRendered); 1306 std::list<FrameRenderTracker::Info> done = 1307 mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete); 1308 1309 // unlink untracked frames 1310 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin(); 1311 it != done.cend(); ++it) { 1312 ssize_t index = it->getIndex(); 1313 if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) { 1314 mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL; 1315 } else if (index >= 0) { 1316 // THIS SHOULD NEVER HAPPEN 1317 ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size()); 1318 } 1319 } 1320 1321 if (MediaCodec::CreateFramesRenderedMessage(done, msg)) { 1322 msg->post(); 1323 } 1324} 1325 1326ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { 1327 ANativeWindowBuffer *buf; 1328 CHECK(mNativeWindow.get() != NULL); 1329 1330 if (mTunneled) { 1331 ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel" 1332 " video playback mode mode!"); 1333 return NULL; 1334 } 1335 1336 if (mFatalError) { 1337 ALOGW("not dequeuing from native window due to fatal error"); 1338 return NULL; 1339 } 1340 1341 int fenceFd = -1; 1342 do { 1343 status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1344 if (err != 0) { 1345 ALOGE("dequeueBuffer failed: %s(%d).", asString(err), 
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    // Dequeues one buffer from the native window and returns the BufferInfo
    // that now owns it (status OWNED_BY_US), or NULL on failure. Discards
    // stale buffers that a consumer may have attached to the queue, and — in
    // metadata mode only — maps a previously unseen buffer onto the
    // least-recently-dequeued slot.
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
            " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        // Scan (newest slot first) for the BufferInfo matching this handle.
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used. We can simply ignore this as we have already dequeued
                // this buffer properly. NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // Reaching here means an unrecognized buffer in metadata mode (the
    // CHECKs below enforce this): replace the oldest undequeued slot.
    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    // Point the slot's metadata record at the replacement buffer.
    mOMX->updateGraphicBufferInMeta(
            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
            oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}
stored in %p)", 1436 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1437 mDequeueCounter - oldest->mDequeuedAt, 1438 (void *)(uintptr_t)nativeMeta->pBuffer, 1439 oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base()); 1440 } 1441 1442 updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest); 1443 return oldest; 1444} 1445 1446status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) { 1447 status_t err = OK; 1448 for (size_t i = mBuffers[portIndex].size(); i > 0;) { 1449 i--; 1450 status_t err2 = freeBuffer(portIndex, i); 1451 if (err == OK) { 1452 err = err2; 1453 } 1454 } 1455 1456 // clear mDealer even on an error 1457 mDealer[portIndex].clear(); 1458 return err; 1459} 1460 1461status_t ACodec::freeOutputBuffersNotOwnedByComponent() { 1462 status_t err = OK; 1463 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1464 i--; 1465 BufferInfo *info = 1466 &mBuffers[kPortIndexOutput].editItemAt(i); 1467 1468 // At this time some buffers may still be with the component 1469 // or being drained. 1470 if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT && 1471 info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { 1472 status_t err2 = freeBuffer(kPortIndexOutput, i); 1473 if (err == OK) { 1474 err = err2; 1475 } 1476 } 1477 } 1478 1479 return err; 1480} 1481 1482status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) { 1483 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1484 status_t err = OK; 1485 1486 // there should not be any fences in the metadata 1487 MetadataBufferType type = 1488 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 1489 if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL 1490 && info->mData->size() >= sizeof(VideoNativeMetadata)) { 1491 int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd; 1492 if (fenceFd >= 0) { 1493 ALOGW("unreleased fence (%d) in %s metadata buffer %zu", 1494 fenceFd, portIndex == kPortIndexInput ? 
"input" : "output", i); 1495 } 1496 } 1497 1498 switch (info->mStatus) { 1499 case BufferInfo::OWNED_BY_US: 1500 if (portIndex == kPortIndexOutput && mNativeWindow != NULL) { 1501 (void)cancelBufferToNativeWindow(info); 1502 } 1503 // fall through 1504 1505 case BufferInfo::OWNED_BY_NATIVE_WINDOW: 1506 err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID); 1507 break; 1508 1509 default: 1510 ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus); 1511 err = FAILED_TRANSACTION; 1512 break; 1513 } 1514 1515 if (info->mFenceFd >= 0) { 1516 ::close(info->mFenceFd); 1517 } 1518 1519 if (portIndex == kPortIndexOutput) { 1520 mRenderTracker.untrackFrame(info->mRenderInfo, i); 1521 info->mRenderInfo = NULL; 1522 } 1523 1524 // remove buffer even if mOMX->freeBuffer fails 1525 mBuffers[portIndex].removeAt(i); 1526 return err; 1527} 1528 1529ACodec::BufferInfo *ACodec::findBufferByID( 1530 uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) { 1531 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1532 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1533 1534 if (info->mBufferID == bufferID) { 1535 if (index != NULL) { 1536 *index = i; 1537 } 1538 return info; 1539 } 1540 } 1541 1542 ALOGE("Could not find buffer with ID %u", bufferID); 1543 return NULL; 1544} 1545 1546status_t ACodec::setComponentRole( 1547 bool isEncoder, const char *mime) { 1548 const char *role = getComponentRole(isEncoder, mime); 1549 if (role == NULL) { 1550 return BAD_VALUE; 1551 } 1552 status_t err = setComponentRole(mOMX, mNode, role); 1553 if (err != OK) { 1554 ALOGW("[%s] Failed to set standard component role '%s'.", 1555 mComponentName.c_str(), role); 1556 } 1557 return err; 1558} 1559 1560//static 1561const char *ACodec::getComponentRole( 1562 bool isEncoder, const char *mime) { 1563 struct MimeToRole { 1564 const char *mime; 1565 const char *decoderRole; 1566 const char *encoderRole; 1567 }; 1568 1569 static const MimeToRole kMimeToRole[] = { 1570 { 
MEDIA_MIMETYPE_AUDIO_MPEG, 1571 "audio_decoder.mp3", "audio_encoder.mp3" }, 1572 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I, 1573 "audio_decoder.mp1", "audio_encoder.mp1" }, 1574 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, 1575 "audio_decoder.mp2", "audio_encoder.mp2" }, 1576 { MEDIA_MIMETYPE_AUDIO_AMR_NB, 1577 "audio_decoder.amrnb", "audio_encoder.amrnb" }, 1578 { MEDIA_MIMETYPE_AUDIO_AMR_WB, 1579 "audio_decoder.amrwb", "audio_encoder.amrwb" }, 1580 { MEDIA_MIMETYPE_AUDIO_AAC, 1581 "audio_decoder.aac", "audio_encoder.aac" }, 1582 { MEDIA_MIMETYPE_AUDIO_VORBIS, 1583 "audio_decoder.vorbis", "audio_encoder.vorbis" }, 1584 { MEDIA_MIMETYPE_AUDIO_OPUS, 1585 "audio_decoder.opus", "audio_encoder.opus" }, 1586 { MEDIA_MIMETYPE_AUDIO_G711_MLAW, 1587 "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" }, 1588 { MEDIA_MIMETYPE_AUDIO_G711_ALAW, 1589 "audio_decoder.g711alaw", "audio_encoder.g711alaw" }, 1590 { MEDIA_MIMETYPE_VIDEO_AVC, 1591 "video_decoder.avc", "video_encoder.avc" }, 1592 { MEDIA_MIMETYPE_VIDEO_HEVC, 1593 "video_decoder.hevc", "video_encoder.hevc" }, 1594 { MEDIA_MIMETYPE_VIDEO_MPEG4, 1595 "video_decoder.mpeg4", "video_encoder.mpeg4" }, 1596 { MEDIA_MIMETYPE_VIDEO_H263, 1597 "video_decoder.h263", "video_encoder.h263" }, 1598 { MEDIA_MIMETYPE_VIDEO_VP8, 1599 "video_decoder.vp8", "video_encoder.vp8" }, 1600 { MEDIA_MIMETYPE_VIDEO_VP9, 1601 "video_decoder.vp9", "video_encoder.vp9" }, 1602 { MEDIA_MIMETYPE_AUDIO_RAW, 1603 "audio_decoder.raw", "audio_encoder.raw" }, 1604 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, 1605 "video_decoder.dolby-vision", "video_encoder.dolby-vision" }, 1606 { MEDIA_MIMETYPE_AUDIO_FLAC, 1607 "audio_decoder.flac", "audio_encoder.flac" }, 1608 { MEDIA_MIMETYPE_AUDIO_MSGSM, 1609 "audio_decoder.gsm", "audio_encoder.gsm" }, 1610 { MEDIA_MIMETYPE_VIDEO_MPEG2, 1611 "video_decoder.mpeg2", "video_encoder.mpeg2" }, 1612 { MEDIA_MIMETYPE_AUDIO_AC3, 1613 "audio_decoder.ac3", "audio_encoder.ac3" }, 1614 { MEDIA_MIMETYPE_AUDIO_EAC3, 1615 "audio_decoder.eac3", 
"audio_encoder.eac3" }, 1616 }; 1617 1618 static const size_t kNumMimeToRole = 1619 sizeof(kMimeToRole) / sizeof(kMimeToRole[0]); 1620 1621 size_t i; 1622 for (i = 0; i < kNumMimeToRole; ++i) { 1623 if (!strcasecmp(mime, kMimeToRole[i].mime)) { 1624 break; 1625 } 1626 } 1627 1628 if (i == kNumMimeToRole) { 1629 return NULL; 1630 } 1631 1632 return isEncoder ? kMimeToRole[i].encoderRole 1633 : kMimeToRole[i].decoderRole; 1634} 1635 1636//static 1637status_t ACodec::setComponentRole( 1638 const sp<IOMX> &omx, IOMX::node_id node, const char *role) { 1639 OMX_PARAM_COMPONENTROLETYPE roleParams; 1640 InitOMXParams(&roleParams); 1641 1642 strncpy((char *)roleParams.cRole, 1643 role, OMX_MAX_STRINGNAME_SIZE - 1); 1644 1645 roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0'; 1646 1647 return omx->setParameter( 1648 node, OMX_IndexParamStandardComponentRole, 1649 &roleParams, sizeof(roleParams)); 1650} 1651 1652status_t ACodec::configureCodec( 1653 const char *mime, const sp<AMessage> &msg) { 1654 int32_t encoder; 1655 if (!msg->findInt32("encoder", &encoder)) { 1656 encoder = false; 1657 } 1658 1659 sp<AMessage> inputFormat = new AMessage; 1660 sp<AMessage> outputFormat = new AMessage; 1661 mConfigFormat = msg; 1662 1663 mIsEncoder = encoder; 1664 1665 mInputMetadataType = kMetadataBufferTypeInvalid; 1666 mOutputMetadataType = kMetadataBufferTypeInvalid; 1667 1668 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1669 1670 if (err != OK) { 1671 return err; 1672 } 1673 1674 int32_t bitRate = 0; 1675 // FLAC encoder doesn't need a bitrate, other encoders do 1676 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1677 && !msg->findInt32("bitrate", &bitRate)) { 1678 return INVALID_OPERATION; 1679 } 1680 1681 int32_t storeMeta; 1682 if (encoder 1683 && msg->findInt32("store-metadata-in-buffers", &storeMeta) 1684 && storeMeta != 0) { 1685 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1686 if (err != OK) { 1687 
ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1688 mComponentName.c_str(), err); 1689 1690 return err; 1691 } 1692 // For this specific case we could be using camera source even if storeMetaDataInBuffers 1693 // returns Gralloc source. Pretend that we are; this will force us to use nBufferSize. 1694 if (mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1695 mInputMetadataType = kMetadataBufferTypeCameraSource; 1696 } 1697 1698 uint32_t usageBits; 1699 if (mOMX->getParameter( 1700 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1701 &usageBits, sizeof(usageBits)) == OK) { 1702 inputFormat->setInt32( 1703 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1704 } 1705 } 1706 1707 int32_t prependSPSPPS = 0; 1708 if (encoder 1709 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1710 && prependSPSPPS != 0) { 1711 OMX_INDEXTYPE index; 1712 err = mOMX->getExtensionIndex( 1713 mNode, 1714 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1715 &index); 1716 1717 if (err == OK) { 1718 PrependSPSPPSToIDRFramesParams params; 1719 InitOMXParams(¶ms); 1720 params.bEnable = OMX_TRUE; 1721 1722 err = mOMX->setParameter( 1723 mNode, index, ¶ms, sizeof(params)); 1724 } 1725 1726 if (err != OK) { 1727 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1728 "IDR frames. (err %d)", err); 1729 1730 return err; 1731 } 1732 } 1733 1734 // Only enable metadata mode on encoder output if encoder can prepend 1735 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1736 // opaque handle, to which we don't have access. 
1737 int32_t video = !strncasecmp(mime, "video/", 6); 1738 mIsVideo = video; 1739 if (encoder && video) { 1740 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1741 && msg->findInt32("store-metadata-in-buffers-output", &storeMeta) 1742 && storeMeta != 0); 1743 1744 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1745 if (err != OK) { 1746 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1747 mComponentName.c_str(), err); 1748 } 1749 1750 if (!msg->findInt64( 1751 "repeat-previous-frame-after", 1752 &mRepeatFrameDelayUs)) { 1753 mRepeatFrameDelayUs = -1ll; 1754 } 1755 1756 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1757 mMaxPtsGapUs = -1ll; 1758 } 1759 1760 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1761 mMaxFps = -1; 1762 } 1763 1764 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1765 mTimePerCaptureUs = -1ll; 1766 } 1767 1768 if (!msg->findInt32( 1769 "create-input-buffers-suspended", 1770 (int32_t*)&mCreateInputBuffersSuspended)) { 1771 mCreateInputBuffersSuspended = false; 1772 } 1773 } 1774 1775 // NOTE: we only use native window for video decoders 1776 sp<RefBase> obj; 1777 bool haveNativeWindow = msg->findObject("native-window", &obj) 1778 && obj != NULL && video && !encoder; 1779 mUsingNativeWindow = haveNativeWindow; 1780 mLegacyAdaptiveExperiment = false; 1781 if (video && !encoder) { 1782 inputFormat->setInt32("adaptive-playback", false); 1783 1784 int32_t usageProtected; 1785 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1786 if (!haveNativeWindow) { 1787 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1788 return PERMISSION_DENIED; 1789 } 1790 mFlags |= kFlagIsGrallocUsageProtected; 1791 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1792 } 1793 1794 if (mFlags & kFlagIsSecure) { 1795 // use native_handles for secure input buffers 1796 err = mOMX->enableNativeBuffers( 1797 mNode, kPortIndexInput, OMX_FALSE 
/* graphic */, OMX_TRUE); 1798 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1799 err = OK; // ignore error for now 1800 } 1801 } 1802 if (haveNativeWindow) { 1803 sp<ANativeWindow> nativeWindow = 1804 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1805 1806 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1807 int32_t autoFrc; 1808 if (msg->findInt32("auto-frc", &autoFrc)) { 1809 bool enabled = autoFrc; 1810 OMX_CONFIG_BOOLEANTYPE config; 1811 InitOMXParams(&config); 1812 config.bEnabled = (OMX_BOOL)enabled; 1813 status_t temp = mOMX->setConfig( 1814 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1815 &config, sizeof(config)); 1816 if (temp == OK) { 1817 outputFormat->setInt32("auto-frc", enabled); 1818 } else if (enabled) { 1819 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1820 } 1821 } 1822 // END of temporary support for automatic FRC 1823 1824 int32_t tunneled; 1825 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1826 tunneled != 0) { 1827 ALOGI("Configuring TUNNELED video playback."); 1828 mTunneled = true; 1829 1830 int32_t audioHwSync = 0; 1831 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1832 ALOGW("No Audio HW Sync provided for video tunnel"); 1833 } 1834 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1835 if (err != OK) { 1836 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1837 audioHwSync, nativeWindow.get()); 1838 return err; 1839 } 1840 1841 int32_t maxWidth = 0, maxHeight = 0; 1842 if (msg->findInt32("max-width", &maxWidth) && 1843 msg->findInt32("max-height", &maxHeight)) { 1844 1845 err = mOMX->prepareForAdaptivePlayback( 1846 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1847 if (err != OK) { 1848 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1849 mComponentName.c_str(), err); 1850 // allow failure 1851 err = OK; 1852 } else { 1853 inputFormat->setInt32("max-width", maxWidth); 1854 
inputFormat->setInt32("max-height", maxHeight); 1855 inputFormat->setInt32("adaptive-playback", true); 1856 } 1857 } 1858 } else { 1859 ALOGV("Configuring CPU controlled video playback."); 1860 mTunneled = false; 1861 1862 // Explicity reset the sideband handle of the window for 1863 // non-tunneled video in case the window was previously used 1864 // for a tunneled video playback. 1865 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1866 if (err != OK) { 1867 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1868 return err; 1869 } 1870 1871 // Always try to enable dynamic output buffers on native surface 1872 err = mOMX->storeMetaDataInBuffers( 1873 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1874 if (err != OK) { 1875 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1876 mComponentName.c_str(), err); 1877 1878 // if adaptive playback has been requested, try JB fallback 1879 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1880 // LARGE MEMORY REQUIREMENT 1881 1882 // we will not do adaptive playback on software accessed 1883 // surfaces as they never had to respond to changes in the 1884 // crop window, and we don't trust that they will be able to. 
1885 int usageBits = 0; 1886 bool canDoAdaptivePlayback; 1887 1888 if (nativeWindow->query( 1889 nativeWindow.get(), 1890 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1891 &usageBits) != OK) { 1892 canDoAdaptivePlayback = false; 1893 } else { 1894 canDoAdaptivePlayback = 1895 (usageBits & 1896 (GRALLOC_USAGE_SW_READ_MASK | 1897 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1898 } 1899 1900 int32_t maxWidth = 0, maxHeight = 0; 1901 if (canDoAdaptivePlayback && 1902 msg->findInt32("max-width", &maxWidth) && 1903 msg->findInt32("max-height", &maxHeight)) { 1904 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1905 mComponentName.c_str(), maxWidth, maxHeight); 1906 1907 err = mOMX->prepareForAdaptivePlayback( 1908 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1909 maxHeight); 1910 ALOGW_IF(err != OK, 1911 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1912 mComponentName.c_str(), err); 1913 1914 if (err == OK) { 1915 inputFormat->setInt32("max-width", maxWidth); 1916 inputFormat->setInt32("max-height", maxHeight); 1917 inputFormat->setInt32("adaptive-playback", true); 1918 } 1919 } 1920 // allow failure 1921 err = OK; 1922 } else { 1923 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1924 mComponentName.c_str()); 1925 CHECK(storingMetadataInDecodedBuffers()); 1926 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1927 "legacy-adaptive", !msg->contains("no-experiments")); 1928 1929 inputFormat->setInt32("adaptive-playback", true); 1930 } 1931 1932 int32_t push; 1933 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1934 && push != 0) { 1935 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1936 } 1937 } 1938 1939 int32_t rotationDegrees; 1940 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1941 mRotationDegrees = rotationDegrees; 1942 } else { 1943 mRotationDegrees = 0; 1944 } 1945 } 1946 1947 if (video) { 1948 // determine need for software renderer 1949 bool usingSwRenderer = false; 1950 if (haveNativeWindow && 
mComponentName.startsWith("OMX.google.")) { 1951 usingSwRenderer = true; 1952 haveNativeWindow = false; 1953 } 1954 1955 if (encoder) { 1956 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 1957 } else { 1958 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 1959 } 1960 1961 if (err != OK) { 1962 return err; 1963 } 1964 1965 if (haveNativeWindow) { 1966 mNativeWindow = static_cast<Surface *>(obj.get()); 1967 } 1968 1969 // initialize native window now to get actual output format 1970 // TODO: this is needed for some encoders even though they don't use native window 1971 err = initNativeWindow(); 1972 if (err != OK) { 1973 return err; 1974 } 1975 1976 // fallback for devices that do not handle flex-YUV for native buffers 1977 if (haveNativeWindow) { 1978 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 1979 if (msg->findInt32("color-format", &requestedColorFormat) && 1980 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 1981 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 1982 if (err != OK) { 1983 return err; 1984 } 1985 int32_t colorFormat = OMX_COLOR_FormatUnused; 1986 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 1987 if (!outputFormat->findInt32("color-format", &colorFormat)) { 1988 ALOGE("ouptut port did not have a color format (wrong domain?)"); 1989 return BAD_VALUE; 1990 } 1991 ALOGD("[%s] Requested output format %#x and got %#x.", 1992 mComponentName.c_str(), requestedColorFormat, colorFormat); 1993 if (!isFlexibleColorFormat( 1994 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 1995 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 1996 // device did not handle flex-YUV request for native window, fall back 1997 // to SW renderer 1998 ALOGI("[%s] Falling back to software renderer", mComponentName.c_str()); 1999 mNativeWindow.clear(); 2000 mNativeWindowUsageBits = 0; 2001 haveNativeWindow = false; 2002 usingSwRenderer = true; 2003 if 
(storingMetadataInDecodedBuffers()) { 2004 err = mOMX->storeMetaDataInBuffers( 2005 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2006 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2007 // TODO: implement adaptive-playback support for bytebuffer mode. 2008 // This is done by SW codecs, but most HW codecs don't support it. 2009 inputFormat->setInt32("adaptive-playback", false); 2010 } 2011 if (err == OK) { 2012 err = mOMX->enableNativeBuffers( 2013 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2014 } 2015 if (mFlags & kFlagIsGrallocUsageProtected) { 2016 // fallback is not supported for protected playback 2017 err = PERMISSION_DENIED; 2018 } else if (err == OK) { 2019 err = setupVideoDecoder( 2020 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2021 } 2022 } 2023 } 2024 } 2025 2026 if (usingSwRenderer) { 2027 outputFormat->setInt32("using-sw-renderer", 1); 2028 } 2029 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2030 int32_t numChannels, sampleRate; 2031 if (!msg->findInt32("channel-count", &numChannels) 2032 || !msg->findInt32("sample-rate", &sampleRate)) { 2033 // Since we did not always check for these, leave them optional 2034 // and have the decoder figure it all out. 2035 err = OK; 2036 } else { 2037 err = setupRawAudioFormat( 2038 encoder ? 
kPortIndexInput : kPortIndexOutput, 2039 sampleRate, 2040 numChannels); 2041 } 2042 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2043 int32_t numChannels, sampleRate; 2044 if (!msg->findInt32("channel-count", &numChannels) 2045 || !msg->findInt32("sample-rate", &sampleRate)) { 2046 err = INVALID_OPERATION; 2047 } else { 2048 int32_t isADTS, aacProfile; 2049 int32_t sbrMode; 2050 int32_t maxOutputChannelCount; 2051 int32_t pcmLimiterEnable; 2052 drcParams_t drc; 2053 if (!msg->findInt32("is-adts", &isADTS)) { 2054 isADTS = 0; 2055 } 2056 if (!msg->findInt32("aac-profile", &aacProfile)) { 2057 aacProfile = OMX_AUDIO_AACObjectNull; 2058 } 2059 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2060 sbrMode = -1; 2061 } 2062 2063 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2064 maxOutputChannelCount = -1; 2065 } 2066 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2067 // value is unknown 2068 pcmLimiterEnable = -1; 2069 } 2070 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2071 // value is unknown 2072 drc.encodedTargetLevel = -1; 2073 } 2074 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2075 // value is unknown 2076 drc.drcCut = -1; 2077 } 2078 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2079 // value is unknown 2080 drc.drcBoost = -1; 2081 } 2082 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2083 // value is unknown 2084 drc.heavyCompression = -1; 2085 } 2086 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2087 // value is unknown 2088 drc.targetRefLevel = -1; 2089 } 2090 2091 err = setupAACCodec( 2092 encoder, numChannels, sampleRate, bitRate, aacProfile, 2093 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2094 pcmLimiterEnable); 2095 } 2096 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2097 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2098 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2099 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2100 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2101 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2102 // These are PCM-like formats with a fixed sample rate but 2103 // a variable number of channels. 2104 2105 int32_t numChannels; 2106 if (!msg->findInt32("channel-count", &numChannels)) { 2107 err = INVALID_OPERATION; 2108 } else { 2109 int32_t sampleRate; 2110 if (!msg->findInt32("sample-rate", &sampleRate)) { 2111 sampleRate = 8000; 2112 } 2113 err = setupG711Codec(encoder, sampleRate, numChannels); 2114 } 2115 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2116 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2117 if (encoder && 2118 (!msg->findInt32("channel-count", &numChannels) 2119 || !msg->findInt32("sample-rate", &sampleRate))) { 2120 ALOGE("missing channel count or sample rate for FLAC encoder"); 2121 err = INVALID_OPERATION; 2122 } else { 2123 if (encoder) { 2124 if (!msg->findInt32( 2125 "complexity", &compressionLevel) && 2126 !msg->findInt32( 2127 "flac-compression-level", &compressionLevel)) { 2128 compressionLevel = 5; // default FLAC compression level 2129 } else if (compressionLevel < 0) { 2130 ALOGW("compression level %d outside [0..8] range, " 2131 "using 0", 2132 compressionLevel); 2133 compressionLevel = 0; 2134 } else if (compressionLevel > 8) { 2135 ALOGW("compression level %d outside [0..8] range, " 2136 "using 8", 2137 compressionLevel); 2138 compressionLevel = 8; 2139 } 2140 } 2141 err = setupFlacCodec( 2142 encoder, numChannels, sampleRate, compressionLevel); 2143 } 2144 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2145 int32_t numChannels, sampleRate; 2146 if (encoder 2147 || !msg->findInt32("channel-count", &numChannels) 2148 || !msg->findInt32("sample-rate", &sampleRate)) { 2149 err = INVALID_OPERATION; 2150 } else { 2151 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels); 2152 } 2153 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2154 int32_t numChannels; 2155 int32_t sampleRate; 2156 if (!msg->findInt32("channel-count", &numChannels) 2157 || !msg->findInt32("sample-rate", &sampleRate)) { 2158 err = INVALID_OPERATION; 2159 } else { 2160 err = setupAC3Codec(encoder, numChannels, sampleRate); 2161 } 2162 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2163 int32_t numChannels; 2164 int32_t sampleRate; 2165 if (!msg->findInt32("channel-count", &numChannels) 2166 || !msg->findInt32("sample-rate", &sampleRate)) { 2167 err = INVALID_OPERATION; 2168 } else { 2169 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2170 } 2171 } 2172 2173 if (err != OK) { 2174 return err; 2175 } 2176 2177 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2178 mEncoderDelay = 0; 2179 } 2180 2181 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2182 mEncoderPadding = 0; 2183 } 2184 2185 if (msg->findInt32("channel-mask", &mChannelMask)) { 2186 mChannelMaskPresent = true; 2187 } else { 2188 mChannelMaskPresent = false; 2189 } 2190 2191 int32_t maxInputSize; 2192 if (msg->findInt32("max-input-size", &maxInputSize)) { 2193 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2194 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2195 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2196 } 2197 2198 int32_t priority; 2199 if (msg->findInt32("priority", &priority)) { 2200 err = setPriority(priority); 2201 } 2202 2203 int32_t rateInt = -1; 2204 float rateFloat = -1; 2205 if (!msg->findFloat("operating-rate", &rateFloat)) { 2206 msg->findInt32("operating-rate", &rateInt); 2207 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
2208 } 2209 if (rateFloat > 0) { 2210 err = setOperatingRate(rateFloat, video); 2211 } 2212 2213 // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame. 2214 mBaseOutputFormat = outputFormat; 2215 // trigger a kWhatOutputFormatChanged msg on first buffer 2216 mLastOutputFormat.clear(); 2217 2218 err = getPortFormat(kPortIndexInput, inputFormat); 2219 if (err == OK) { 2220 err = getPortFormat(kPortIndexOutput, outputFormat); 2221 if (err == OK) { 2222 mInputFormat = inputFormat; 2223 mOutputFormat = outputFormat; 2224 } 2225 } 2226 return err; 2227} 2228 2229status_t ACodec::setPriority(int32_t priority) { 2230 if (priority < 0) { 2231 return BAD_VALUE; 2232 } 2233 OMX_PARAM_U32TYPE config; 2234 InitOMXParams(&config); 2235 config.nU32 = (OMX_U32)priority; 2236 status_t temp = mOMX->setConfig( 2237 mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2238 &config, sizeof(config)); 2239 if (temp != OK) { 2240 ALOGI("codec does not support config priority (err %d)", temp); 2241 } 2242 return OK; 2243} 2244 2245status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2246 if (rateFloat < 0) { 2247 return BAD_VALUE; 2248 } 2249 OMX_U32 rate; 2250 if (isVideo) { 2251 if (rateFloat > 65535) { 2252 return BAD_VALUE; 2253 } 2254 rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f); 2255 } else { 2256 if (rateFloat > UINT_MAX) { 2257 return BAD_VALUE; 2258 } 2259 rate = (OMX_U32)(rateFloat); 2260 } 2261 OMX_PARAM_U32TYPE config; 2262 InitOMXParams(&config); 2263 config.nU32 = rate; 2264 status_t err = mOMX->setConfig( 2265 mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, 2266 &config, sizeof(config)); 2267 if (err != OK) { 2268 ALOGI("codec does not support config operating rate (err %d)", err); 2269 } 2270 return OK; 2271} 2272 2273status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) { 2274 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2275 InitOMXParams(¶ms); 2276 params.nPortIndex = kPortIndexOutput; 2277 status_t err 
= mOMX->getConfig( 2278 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2279 if (err == OK) { 2280 *intraRefreshPeriod = params.nRefreshPeriod; 2281 return OK; 2282 } 2283 2284 // Fallback to query through standard OMX index. 2285 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2286 InitOMXParams(&refreshParams); 2287 refreshParams.nPortIndex = kPortIndexOutput; 2288 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2289 err = mOMX->getParameter( 2290 mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams)); 2291 if (err != OK || refreshParams.nCirMBs == 0) { 2292 *intraRefreshPeriod = 0; 2293 return OK; 2294 } 2295 2296 // Calculate period based on width and height 2297 uint32_t width, height; 2298 OMX_PARAM_PORTDEFINITIONTYPE def; 2299 InitOMXParams(&def); 2300 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2301 def.nPortIndex = kPortIndexOutput; 2302 err = mOMX->getParameter( 2303 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2304 if (err != OK) { 2305 *intraRefreshPeriod = 0; 2306 return err; 2307 } 2308 width = video_def->nFrameWidth; 2309 height = video_def->nFrameHeight; 2310 // Use H.264/AVC MacroBlock size 16x16 2311 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs); 2312 2313 return OK; 2314} 2315 2316status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) { 2317 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2318 InitOMXParams(¶ms); 2319 params.nPortIndex = kPortIndexOutput; 2320 params.nRefreshPeriod = intraRefreshPeriod; 2321 status_t err = mOMX->setConfig( 2322 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2323 if (err == OK) { 2324 return OK; 2325 } 2326 2327 // Only in configure state, a component could invoke setParameter. 
2328 if (!inConfigure) { 2329 return INVALID_OPERATION; 2330 } else { 2331 ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str()); 2332 } 2333 2334 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2335 InitOMXParams(&refreshParams); 2336 refreshParams.nPortIndex = kPortIndexOutput; 2337 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2338 2339 if (intraRefreshPeriod == 0) { 2340 // 0 means disable intra refresh. 2341 refreshParams.nCirMBs = 0; 2342 } else { 2343 // Calculate macroblocks that need to be intra coded base on width and height 2344 uint32_t width, height; 2345 OMX_PARAM_PORTDEFINITIONTYPE def; 2346 InitOMXParams(&def); 2347 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2348 def.nPortIndex = kPortIndexOutput; 2349 err = mOMX->getParameter( 2350 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2351 if (err != OK) { 2352 return err; 2353 } 2354 width = video_def->nFrameWidth; 2355 height = video_def->nFrameHeight; 2356 // Use H.264/AVC MacroBlock size 16x16 2357 refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod); 2358 } 2359 2360 err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh, 2361 &refreshParams, sizeof(refreshParams)); 2362 if (err != OK) { 2363 return err; 2364 } 2365 2366 return OK; 2367} 2368 2369status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { 2370 OMX_PARAM_PORTDEFINITIONTYPE def; 2371 InitOMXParams(&def); 2372 def.nPortIndex = portIndex; 2373 2374 status_t err = mOMX->getParameter( 2375 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2376 2377 if (err != OK) { 2378 return err; 2379 } 2380 2381 if (def.nBufferSize >= size) { 2382 return OK; 2383 } 2384 2385 def.nBufferSize = size; 2386 2387 err = mOMX->setParameter( 2388 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2389 2390 if (err != OK) { 2391 return err; 2392 } 2393 2394 err = mOMX->getParameter( 2395 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 
2396 2397 if (err != OK) { 2398 return err; 2399 } 2400 2401 if (def.nBufferSize < size) { 2402 ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize); 2403 return FAILED_TRANSACTION; 2404 } 2405 2406 return OK; 2407} 2408 2409status_t ACodec::selectAudioPortFormat( 2410 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 2411 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 2412 InitOMXParams(&format); 2413 2414 format.nPortIndex = portIndex; 2415 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2416 format.nIndex = index; 2417 status_t err = mOMX->getParameter( 2418 mNode, OMX_IndexParamAudioPortFormat, 2419 &format, sizeof(format)); 2420 2421 if (err != OK) { 2422 return err; 2423 } 2424 2425 if (format.eEncoding == desiredFormat) { 2426 break; 2427 } 2428 2429 if (index == kMaxIndicesToCheck) { 2430 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 2431 mComponentName.c_str(), index, 2432 asString(format.eEncoding), format.eEncoding); 2433 return ERROR_UNSUPPORTED; 2434 } 2435 } 2436 2437 return mOMX->setParameter( 2438 mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2439} 2440 2441status_t ACodec::setupAACCodec( 2442 bool encoder, int32_t numChannels, int32_t sampleRate, 2443 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 2444 int32_t maxOutputChannelCount, const drcParams_t& drc, 2445 int32_t pcmLimiterEnable) { 2446 if (encoder && isADTS) { 2447 return -EINVAL; 2448 } 2449 2450 status_t err = setupRawAudioFormat( 2451 encoder ? 
kPortIndexInput : kPortIndexOutput, 2452 sampleRate, 2453 numChannels); 2454 2455 if (err != OK) { 2456 return err; 2457 } 2458 2459 if (encoder) { 2460 err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC); 2461 2462 if (err != OK) { 2463 return err; 2464 } 2465 2466 OMX_PARAM_PORTDEFINITIONTYPE def; 2467 InitOMXParams(&def); 2468 def.nPortIndex = kPortIndexOutput; 2469 2470 err = mOMX->getParameter( 2471 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2472 2473 if (err != OK) { 2474 return err; 2475 } 2476 2477 def.format.audio.bFlagErrorConcealment = OMX_TRUE; 2478 def.format.audio.eEncoding = OMX_AUDIO_CodingAAC; 2479 2480 err = mOMX->setParameter( 2481 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2482 2483 if (err != OK) { 2484 return err; 2485 } 2486 2487 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2488 InitOMXParams(&profile); 2489 profile.nPortIndex = kPortIndexOutput; 2490 2491 err = mOMX->getParameter( 2492 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2493 2494 if (err != OK) { 2495 return err; 2496 } 2497 2498 profile.nChannels = numChannels; 2499 2500 profile.eChannelMode = 2501 (numChannels == 1) 2502 ? 
OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo; 2503 2504 profile.nSampleRate = sampleRate; 2505 profile.nBitRate = bitRate; 2506 profile.nAudioBandWidth = 0; 2507 profile.nFrameLength = 0; 2508 profile.nAACtools = OMX_AUDIO_AACToolAll; 2509 profile.nAACERtools = OMX_AUDIO_AACERNone; 2510 profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile; 2511 profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF; 2512 switch (sbrMode) { 2513 case 0: 2514 // disable sbr 2515 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2516 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2517 break; 2518 case 1: 2519 // enable single-rate sbr 2520 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2521 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2522 break; 2523 case 2: 2524 // enable dual-rate sbr 2525 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2526 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2527 break; 2528 case -1: 2529 // enable both modes -> the codec will decide which mode should be used 2530 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2531 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2532 break; 2533 default: 2534 // unsupported sbr mode 2535 return BAD_VALUE; 2536 } 2537 2538 2539 err = mOMX->setParameter( 2540 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2541 2542 if (err != OK) { 2543 return err; 2544 } 2545 2546 return err; 2547 } 2548 2549 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2550 InitOMXParams(&profile); 2551 profile.nPortIndex = kPortIndexInput; 2552 2553 err = mOMX->getParameter( 2554 mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2555 2556 if (err != OK) { 2557 return err; 2558 } 2559 2560 profile.nChannels = numChannels; 2561 profile.nSampleRate = sampleRate; 2562 2563 profile.eAACStreamFormat = 2564 isADTS 2565 ? 
OMX_AUDIO_AACStreamFormatMP4ADTS 2566 : OMX_AUDIO_AACStreamFormatMP4FF; 2567 2568 OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation; 2569 InitOMXParams(&presentation); 2570 presentation.nMaxOutputChannels = maxOutputChannelCount; 2571 presentation.nDrcCut = drc.drcCut; 2572 presentation.nDrcBoost = drc.drcBoost; 2573 presentation.nHeavyCompression = drc.heavyCompression; 2574 presentation.nTargetReferenceLevel = drc.targetRefLevel; 2575 presentation.nEncodedTargetLevel = drc.encodedTargetLevel; 2576 presentation.nPCMLimiterEnable = pcmLimiterEnable; 2577 2578 status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2579 if (res == OK) { 2580 // optional parameters, will not cause configuration failure 2581 mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation, 2582 &presentation, sizeof(presentation)); 2583 } else { 2584 ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res); 2585 } 2586 return res; 2587} 2588 2589status_t ACodec::setupAC3Codec( 2590 bool encoder, int32_t numChannels, int32_t sampleRate) { 2591 status_t err = setupRawAudioFormat( 2592 encoder ? 
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2593 2594 if (err != OK) { 2595 return err; 2596 } 2597 2598 if (encoder) { 2599 ALOGW("AC3 encoding is not supported."); 2600 return INVALID_OPERATION; 2601 } 2602 2603 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2604 InitOMXParams(&def); 2605 def.nPortIndex = kPortIndexInput; 2606 2607 err = mOMX->getParameter( 2608 mNode, 2609 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2610 &def, 2611 sizeof(def)); 2612 2613 if (err != OK) { 2614 return err; 2615 } 2616 2617 def.nChannels = numChannels; 2618 def.nSampleRate = sampleRate; 2619 2620 return mOMX->setParameter( 2621 mNode, 2622 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2623 &def, 2624 sizeof(def)); 2625} 2626 2627status_t ACodec::setupEAC3Codec( 2628 bool encoder, int32_t numChannels, int32_t sampleRate) { 2629 status_t err = setupRawAudioFormat( 2630 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2631 2632 if (err != OK) { 2633 return err; 2634 } 2635 2636 if (encoder) { 2637 ALOGW("EAC3 encoding is not supported."); 2638 return INVALID_OPERATION; 2639 } 2640 2641 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2642 InitOMXParams(&def); 2643 def.nPortIndex = kPortIndexInput; 2644 2645 err = mOMX->getParameter( 2646 mNode, 2647 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2648 &def, 2649 sizeof(def)); 2650 2651 if (err != OK) { 2652 return err; 2653 } 2654 2655 def.nChannels = numChannels; 2656 def.nSampleRate = sampleRate; 2657 2658 return mOMX->setParameter( 2659 mNode, 2660 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2661 &def, 2662 sizeof(def)); 2663} 2664 2665static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2666 bool isAMRWB, int32_t bps) { 2667 if (isAMRWB) { 2668 if (bps <= 6600) { 2669 return OMX_AUDIO_AMRBandModeWB0; 2670 } else if (bps <= 8850) { 2671 return OMX_AUDIO_AMRBandModeWB1; 2672 } else if (bps <= 12650) { 2673 return OMX_AUDIO_AMRBandModeWB2; 2674 } else if (bps <= 14250) { 2675 return OMX_AUDIO_AMRBandModeWB3; 
2676 } else if (bps <= 15850) { 2677 return OMX_AUDIO_AMRBandModeWB4; 2678 } else if (bps <= 18250) { 2679 return OMX_AUDIO_AMRBandModeWB5; 2680 } else if (bps <= 19850) { 2681 return OMX_AUDIO_AMRBandModeWB6; 2682 } else if (bps <= 23050) { 2683 return OMX_AUDIO_AMRBandModeWB7; 2684 } 2685 2686 // 23850 bps 2687 return OMX_AUDIO_AMRBandModeWB8; 2688 } else { // AMRNB 2689 if (bps <= 4750) { 2690 return OMX_AUDIO_AMRBandModeNB0; 2691 } else if (bps <= 5150) { 2692 return OMX_AUDIO_AMRBandModeNB1; 2693 } else if (bps <= 5900) { 2694 return OMX_AUDIO_AMRBandModeNB2; 2695 } else if (bps <= 6700) { 2696 return OMX_AUDIO_AMRBandModeNB3; 2697 } else if (bps <= 7400) { 2698 return OMX_AUDIO_AMRBandModeNB4; 2699 } else if (bps <= 7950) { 2700 return OMX_AUDIO_AMRBandModeNB5; 2701 } else if (bps <= 10200) { 2702 return OMX_AUDIO_AMRBandModeNB6; 2703 } 2704 2705 // 12200 bps 2706 return OMX_AUDIO_AMRBandModeNB7; 2707 } 2708} 2709 2710status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2711 OMX_AUDIO_PARAM_AMRTYPE def; 2712 InitOMXParams(&def); 2713 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2714 2715 status_t err = 2716 mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2717 2718 if (err != OK) { 2719 return err; 2720 } 2721 2722 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2723 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2724 2725 err = mOMX->setParameter( 2726 mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2727 2728 if (err != OK) { 2729 return err; 2730 } 2731 2732 return setupRawAudioFormat( 2733 encoder ? kPortIndexInput : kPortIndexOutput, 2734 isWAMR ? 
16000 : 8000 /* sampleRate */, 2735 1 /* numChannels */); 2736} 2737 2738status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2739 if (encoder) { 2740 return INVALID_OPERATION; 2741 } 2742 2743 return setupRawAudioFormat( 2744 kPortIndexInput, sampleRate, numChannels); 2745} 2746 2747status_t ACodec::setupFlacCodec( 2748 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2749 2750 if (encoder) { 2751 OMX_AUDIO_PARAM_FLACTYPE def; 2752 InitOMXParams(&def); 2753 def.nPortIndex = kPortIndexOutput; 2754 2755 // configure compression level 2756 status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2757 if (err != OK) { 2758 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2759 return err; 2760 } 2761 def.nCompressionLevel = compressionLevel; 2762 err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2763 if (err != OK) { 2764 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2765 return err; 2766 } 2767 } 2768 2769 return setupRawAudioFormat( 2770 encoder ? 
kPortIndexInput : kPortIndexOutput, 2771 sampleRate, 2772 numChannels); 2773} 2774 2775status_t ACodec::setupRawAudioFormat( 2776 OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels) { 2777 OMX_PARAM_PORTDEFINITIONTYPE def; 2778 InitOMXParams(&def); 2779 def.nPortIndex = portIndex; 2780 2781 status_t err = mOMX->getParameter( 2782 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2783 2784 if (err != OK) { 2785 return err; 2786 } 2787 2788 def.format.audio.eEncoding = OMX_AUDIO_CodingPCM; 2789 2790 err = mOMX->setParameter( 2791 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2792 2793 if (err != OK) { 2794 return err; 2795 } 2796 2797 OMX_AUDIO_PARAM_PCMMODETYPE pcmParams; 2798 InitOMXParams(&pcmParams); 2799 pcmParams.nPortIndex = portIndex; 2800 2801 err = mOMX->getParameter( 2802 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2803 2804 if (err != OK) { 2805 return err; 2806 } 2807 2808 pcmParams.nChannels = numChannels; 2809 pcmParams.eNumData = OMX_NumericalDataSigned; 2810 pcmParams.bInterleaved = OMX_TRUE; 2811 pcmParams.nBitPerSample = 16; 2812 pcmParams.nSamplingRate = sampleRate; 2813 pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear; 2814 2815 if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) { 2816 return OMX_ErrorNone; 2817 } 2818 2819 return mOMX->setParameter( 2820 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2821} 2822 2823status_t ACodec::configureTunneledVideoPlayback( 2824 int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) { 2825 native_handle_t* sidebandHandle; 2826 2827 status_t err = mOMX->configureVideoTunnelMode( 2828 mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle); 2829 if (err != OK) { 2830 ALOGE("configureVideoTunnelMode failed! (err %d).", err); 2831 return err; 2832 } 2833 2834 err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle); 2835 if (err != OK) { 2836 ALOGE("native_window_set_sideband_stream(%p) failed! 
(err %d).", 2837 sidebandHandle, err); 2838 return err; 2839 } 2840 2841 return OK; 2842} 2843 2844status_t ACodec::setVideoPortFormatType( 2845 OMX_U32 portIndex, 2846 OMX_VIDEO_CODINGTYPE compressionFormat, 2847 OMX_COLOR_FORMATTYPE colorFormat, 2848 bool usingNativeBuffers) { 2849 OMX_VIDEO_PARAM_PORTFORMATTYPE format; 2850 InitOMXParams(&format); 2851 format.nPortIndex = portIndex; 2852 format.nIndex = 0; 2853 bool found = false; 2854 2855 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2856 format.nIndex = index; 2857 status_t err = mOMX->getParameter( 2858 mNode, OMX_IndexParamVideoPortFormat, 2859 &format, sizeof(format)); 2860 2861 if (err != OK) { 2862 return err; 2863 } 2864 2865 // substitute back flexible color format to codec supported format 2866 OMX_U32 flexibleEquivalent; 2867 if (compressionFormat == OMX_VIDEO_CodingUnused 2868 && isFlexibleColorFormat( 2869 mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent) 2870 && colorFormat == flexibleEquivalent) { 2871 ALOGI("[%s] using color format %#x in place of %#x", 2872 mComponentName.c_str(), format.eColorFormat, colorFormat); 2873 colorFormat = format.eColorFormat; 2874 } 2875 2876 // The following assertion is violated by TI's video decoder. 2877 // CHECK_EQ(format.nIndex, index); 2878 2879 if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) { 2880 if (portIndex == kPortIndexInput 2881 && colorFormat == format.eColorFormat) { 2882 // eCompressionFormat does not seem right. 2883 found = true; 2884 break; 2885 } 2886 if (portIndex == kPortIndexOutput 2887 && compressionFormat == format.eCompressionFormat) { 2888 // eColorFormat does not seem right. 
2889 found = true; 2890 break; 2891 } 2892 } 2893 2894 if (format.eCompressionFormat == compressionFormat 2895 && format.eColorFormat == colorFormat) { 2896 found = true; 2897 break; 2898 } 2899 2900 if (index == kMaxIndicesToCheck) { 2901 ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)", 2902 mComponentName.c_str(), index, 2903 asString(format.eCompressionFormat), format.eCompressionFormat, 2904 asString(format.eColorFormat), format.eColorFormat); 2905 } 2906 } 2907 2908 if (!found) { 2909 return UNKNOWN_ERROR; 2910 } 2911 2912 status_t err = mOMX->setParameter( 2913 mNode, OMX_IndexParamVideoPortFormat, 2914 &format, sizeof(format)); 2915 2916 return err; 2917} 2918 2919// Set optimal output format. OMX component lists output formats in the order 2920// of preference, but this got more complicated since the introduction of flexible 2921// YUV formats. We support a legacy behavior for applications that do not use 2922// surface output, do not specify an output format, but expect a "usable" standard 2923// OMX format. SW readable and standard formats must be flex-YUV. 2924// 2925// Suggested preference order: 2926// - optimal format for texture rendering (mediaplayer behavior) 2927// - optimal SW readable & texture renderable format (flex-YUV support) 2928// - optimal SW readable non-renderable format (flex-YUV bytebuffer support) 2929// - legacy "usable" standard formats 2930// 2931// For legacy support, we prefer a standard format, but will settle for a SW readable 2932// flex-YUV format. 
// Chooses and applies an uncompressed color format on the decoder's output
// port.  The port's supported formats are enumerated in the component's
// preference order; normally the first entry wins.  When
// getLegacyFlexibleFormat is true (ByteBuffer mode, no native window), a
// well-known YUV420 format is preferred, and if none is listed, the first
// vendor format that is a flexible-YUV420 equivalent is used instead so
// that pre-flexible-YUV clients keep working.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            // decoder output must be raw video, not another compressed format
            return OMX_ErrorBadParameter;
        }
        if (!getLegacyFlexibleFormat) {
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}

// Mapping between media MIME types and OMX video coding enum values; used in
// both directions by the two lookup helpers below.
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
    { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision },
};

// Case-insensitive lookup of the OMX coding type for |mime|.  On a miss,
// *codingType is set to OMX_VIDEO_CodingUnused and ERROR_UNSUPPORTED returned.
static status_t GetVideoCodingTypeFromMime(
        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
            return OK;
        }
    }

    *codingType = OMX_VIDEO_CodingUnused;

    return ERROR_UNSUPPORTED;
}

// Reverse lookup: the MIME type string for |codingType|.  On a miss, |mime|
// is cleared and ERROR_UNSUPPORTED is returned.
static status_t GetMimeTypeForVideoCoding(
        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
            *mime = kVideoCodingMapEntry[i].mMime;
            return OK;
        }
    }

    mime->clear();

    return ERROR_UNSUPPORTED;
}

// Configures a video decoder: the input (compressed) port's coding type, the
// output (raw) port's color format, both ports' dimensions/frame rate, and
// the color aspects.  |msg| must contain "width" and "height";
// "color-format" and "frame-rate" are optional.  Color-aspect support is
// optional for the component, so ERROR_UNSUPPORTED from that step is not
// treated as a failure.
status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
        bool usingSwRenderer, sp<AMessage> &outputFormat) {
    int32_t width, height;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CODINGTYPE compressionFormat;
    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        return err;
    }

    int32_t tmp;
    if (msg->findInt32("color-format", &tmp)) {
        OMX_COLOR_FORMATTYPE colorFormat =
            static_cast<OMX_COLOR_FORMATTYPE>(tmp);
        err = setVideoPortFormatType(
                kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
        if (err != OK) {
            ALOGW("[%s] does not support color format %d",
                  mComponentName.c_str(), colorFormat);
            // requested format not supported; fall back to a component-chosen one
            err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
        }
    } else {
        err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
    }

    if (err != OK) {
        return err;
    }

    // frame rate may be given as a float or an int32; -1 if absent
    int32_t frameRateInt;
    float frameRateFloat;
    if (!msg->findFloat("frame-rate", &frameRateFloat)) {
        if (!msg->findInt32("frame-rate", &frameRateInt)) {
            frameRateInt = -1;
        }
        frameRateFloat = (float)frameRateInt;
    }

    err = setVideoFormatOnPort(
            kPortIndexInput, width, height, compressionFormat, frameRateFloat);

    if (err != OK) {
        return err;
    }

    err = setVideoFormatOnPort(
            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);

    if (err != OK) {
        return err;
    }

    err = setColorAspectsForVideoDecoder(
            width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }
    return err;
}

// Queries the vendor extension index used to describe color aspects.  On
// failure the cached index is reset to 0, which the helpers below treat as
// "extension not supported".
status_t ACodec::initDescribeColorAspectsIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
    if (err != OK) {
        mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Sends |params|' color aspects to the codec via the describeColorAspects
// extension (if supported).  When |verify| is true, reads the aspects back
// afterwards so the caller sees what the codec actually accepted.  Returns
// ERROR_UNSUPPORTED if the extension index was not found.
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));

    if (verify && err == OK) {
        err = getCodecColorAspects(params);
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
            "[%s] getting color aspects failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Derives the decoder's output color aspects from the configure() format
// and communicates them to the codec.  With a native window, unspecified
// aspects are first filled with size-based defaults and written back into
// |outputFormat|.
status_t ACodec::setColorAspectsForVideoDecoder(
        int32_t width, int32_t height, bool usingNativeWindow,
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (usingNativeWindow) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
        // The default aspects will be set back to the output format during the
        // getFormat phase of configure(). Set non-Unspecified values back into the
        // format, in case component does not support this enumeration.
        setColorAspectsIntoFormat(params.sAspects, outputFormat);
    }

    (void)initDescribeColorAspectsIndex();

    // communicate color aspects to codec
    return setCodecColorAspects(params);
}

// Reads the codec's current color aspects (and, if requested via the
// bRequestingDataSpace flag in |params|, its dataspace suggestion).  Returns
// ERROR_UNSUPPORTED if the extension index was not found.
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}

// Copies the encoder's input-port color aspects into |format|.  Only applied
// when the codec actually reports aspects (err == OK).
status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    status_t err = getCodecColorAspects(params);
    if (err == OK) {
        // we only set encoder input aspects if codec supports them
        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
    }
    return err;
}

// Determines the dataspace for |params|' aspects.  When |tryCodec| is set,
// the codec is asked first (via bRequestingDataSpace); if it declines or
// answers HAL_DATASPACE_UNKNOWN, the dataspace is derived from the aspects
// using the platform's legacy-capable mapping.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
          "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}


// Re-negotiates the decoder's output color aspects (e.g. after a format
// change), writes the result into |outputFormat|, and optionally (when
// |dataSpace| is non-null) computes the dataspace to use for rendering.
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}

// initial video encoder setup for bytebuffer mode
3252status_t ACodec::setColorAspectsForVideoEncoder( 3253 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3254 // copy config to output format as this is not exposed via getFormat 3255 copyColorConfig(configFormat, outputFormat); 3256 3257 DescribeColorAspectsParams params; 3258 InitOMXParams(¶ms); 3259 params.nPortIndex = kPortIndexInput; 3260 getColorAspectsFromFormat(configFormat, params.sAspects); 3261 3262 (void)initDescribeColorAspectsIndex(); 3263 3264 int32_t usingRecorder; 3265 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3266 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3267 int32_t width, height; 3268 if (configFormat->findInt32("width", &width) 3269 && configFormat->findInt32("height", &height)) { 3270 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3271 status_t err = getDataSpace( 3272 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3273 if (err != OK) { 3274 return err; 3275 } 3276 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3277 } 3278 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3279 } 3280 3281 // communicate color aspects to codec, but do not allow change of the platform aspects 3282 ColorAspects origAspects = params.sAspects; 3283 for (int triesLeft = 2; --triesLeft >= 0; ) { 3284 status_t err = setCodecColorAspects(params, true /* readBack */); 3285 if (err != OK 3286 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3287 params.sAspects, origAspects, true /* usePlatformAspects */)) { 3288 return err; 3289 } 3290 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3291 mComponentName.c_str()); 3292 } 3293 return OK; 3294} 3295 3296// subsequent initial video encoder setup for surface mode 3297status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3298 android_dataspace *dataSpace /* nonnull */) { 3299 
DescribeColorAspectsParams params; 3300 InitOMXParams(¶ms); 3301 params.nPortIndex = kPortIndexInput; 3302 ColorAspects &aspects = params.sAspects; 3303 3304 // reset default format and store resulting format into both input and output formats 3305 getColorAspectsFromFormat(mConfigFormat, aspects); 3306 int32_t width, height; 3307 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3308 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3309 } 3310 setColorAspectsIntoFormat(aspects, mInputFormat); 3311 setColorAspectsIntoFormat(aspects, mOutputFormat); 3312 3313 // communicate color aspects to codec, but do not allow any change 3314 ColorAspects origAspects = aspects; 3315 status_t err = OK; 3316 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3317 status_t err = setCodecColorAspects(params, true /* readBack */); 3318 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3319 break; 3320 } 3321 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3322 mComponentName.c_str()); 3323 } 3324 3325 *dataSpace = HAL_DATASPACE_BT709; 3326 aspects = origAspects; // restore desired color aspects 3327 status_t res = getDataSpace( 3328 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3329 if (err == OK) { 3330 err = res; 3331 } 3332 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace); 3333 mInputFormat->setBuffer( 3334 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects))); 3335 3336 // update input format with codec supported color aspects (basically set unsupported 3337 // aspects to Unspecified) 3338 if (err == OK) { 3339 (void)getInputColorAspectsForVideoEncoder(mInputFormat); 3340 } 3341 3342 ALOGV("set default color aspects, updated input format to %s, output format to %s", 3343 mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str()); 3344 3345 
return err; 3346} 3347 3348status_t ACodec::setupVideoEncoder( 3349 const char *mime, const sp<AMessage> &msg, 3350 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3351 int32_t tmp; 3352 if (!msg->findInt32("color-format", &tmp)) { 3353 return INVALID_OPERATION; 3354 } 3355 3356 OMX_COLOR_FORMATTYPE colorFormat = 3357 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3358 3359 status_t err = setVideoPortFormatType( 3360 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3361 3362 if (err != OK) { 3363 ALOGE("[%s] does not support color format %d", 3364 mComponentName.c_str(), colorFormat); 3365 3366 return err; 3367 } 3368 3369 /* Input port configuration */ 3370 3371 OMX_PARAM_PORTDEFINITIONTYPE def; 3372 InitOMXParams(&def); 3373 3374 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3375 3376 def.nPortIndex = kPortIndexInput; 3377 3378 err = mOMX->getParameter( 3379 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3380 3381 if (err != OK) { 3382 return err; 3383 } 3384 3385 int32_t width, height, bitrate; 3386 if (!msg->findInt32("width", &width) 3387 || !msg->findInt32("height", &height) 3388 || !msg->findInt32("bitrate", &bitrate)) { 3389 return INVALID_OPERATION; 3390 } 3391 3392 video_def->nFrameWidth = width; 3393 video_def->nFrameHeight = height; 3394 3395 int32_t stride; 3396 if (!msg->findInt32("stride", &stride)) { 3397 stride = width; 3398 } 3399 3400 video_def->nStride = stride; 3401 3402 int32_t sliceHeight; 3403 if (!msg->findInt32("slice-height", &sliceHeight)) { 3404 sliceHeight = height; 3405 } 3406 3407 video_def->nSliceHeight = sliceHeight; 3408 3409 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3410 3411 float frameRate; 3412 if (!msg->findFloat("frame-rate", &frameRate)) { 3413 int32_t tmp; 3414 if (!msg->findInt32("frame-rate", &tmp)) { 3415 return INVALID_OPERATION; 3416 } 3417 frameRate = (float)tmp; 3418 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3419 } 3420 3421 
video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3422 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3423 // this is redundant as it was already set up in setVideoPortFormatType 3424 // FIXME for now skip this only for flexible YUV formats 3425 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3426 video_def->eColorFormat = colorFormat; 3427 } 3428 3429 err = mOMX->setParameter( 3430 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3431 3432 if (err != OK) { 3433 ALOGE("[%s] failed to set input port definition parameters.", 3434 mComponentName.c_str()); 3435 3436 return err; 3437 } 3438 3439 /* Output port configuration */ 3440 3441 OMX_VIDEO_CODINGTYPE compressionFormat; 3442 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3443 3444 if (err != OK) { 3445 return err; 3446 } 3447 3448 err = setVideoPortFormatType( 3449 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3450 3451 if (err != OK) { 3452 ALOGE("[%s] does not support compression format %d", 3453 mComponentName.c_str(), compressionFormat); 3454 3455 return err; 3456 } 3457 3458 def.nPortIndex = kPortIndexOutput; 3459 3460 err = mOMX->getParameter( 3461 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3462 3463 if (err != OK) { 3464 return err; 3465 } 3466 3467 video_def->nFrameWidth = width; 3468 video_def->nFrameHeight = height; 3469 video_def->xFramerate = 0; 3470 video_def->nBitrate = bitrate; 3471 video_def->eCompressionFormat = compressionFormat; 3472 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3473 3474 err = mOMX->setParameter( 3475 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3476 3477 if (err != OK) { 3478 ALOGE("[%s] failed to set output port definition parameters.", 3479 mComponentName.c_str()); 3480 3481 return err; 3482 } 3483 3484 int32_t intraRefreshPeriod = 0; 3485 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3486 && intraRefreshPeriod >= 0) { 3487 err = 
setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3488 if (err != OK) { 3489 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 3490 mComponentName.c_str()); 3491 err = OK; 3492 } 3493 } 3494 3495 switch (compressionFormat) { 3496 case OMX_VIDEO_CodingMPEG4: 3497 err = setupMPEG4EncoderParameters(msg); 3498 break; 3499 3500 case OMX_VIDEO_CodingH263: 3501 err = setupH263EncoderParameters(msg); 3502 break; 3503 3504 case OMX_VIDEO_CodingAVC: 3505 err = setupAVCEncoderParameters(msg); 3506 break; 3507 3508 case OMX_VIDEO_CodingHEVC: 3509 err = setupHEVCEncoderParameters(msg); 3510 break; 3511 3512 case OMX_VIDEO_CodingVP8: 3513 case OMX_VIDEO_CodingVP9: 3514 err = setupVPXEncoderParameters(msg); 3515 break; 3516 3517 default: 3518 break; 3519 } 3520 3521 // Set up color aspects on input, but propagate them to the output format, as they will 3522 // not be read back from encoder. 3523 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3524 if (err == ERROR_UNSUPPORTED) { 3525 ALOGI("[%s] cannot encode color aspects. 
Ignoring.", mComponentName.c_str()); 3526 err = OK; 3527 } 3528 3529 if (err == OK) { 3530 ALOGI("setupVideoEncoder succeeded"); 3531 } 3532 3533 return err; 3534} 3535 3536status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3537 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3538 InitOMXParams(¶ms); 3539 params.nPortIndex = kPortIndexOutput; 3540 3541 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3542 3543 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3544 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3545 int32_t mbs; 3546 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3547 return INVALID_OPERATION; 3548 } 3549 params.nCirMBs = mbs; 3550 } 3551 3552 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3553 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3554 int32_t mbs; 3555 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3556 return INVALID_OPERATION; 3557 } 3558 params.nAirMBs = mbs; 3559 3560 int32_t ref; 3561 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3562 return INVALID_OPERATION; 3563 } 3564 params.nAirRef = ref; 3565 } 3566 3567 status_t err = mOMX->setParameter( 3568 mNode, OMX_IndexParamVideoIntraRefresh, 3569 ¶ms, sizeof(params)); 3570 return err; 3571} 3572 3573static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) { 3574 if (iFramesInterval < 0) { 3575 return 0xFFFFFFFF; 3576 } else if (iFramesInterval == 0) { 3577 return 0; 3578 } 3579 OMX_U32 ret = frameRate * iFramesInterval; 3580 return ret; 3581} 3582 3583static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 3584 int32_t tmp; 3585 if (!msg->findInt32("bitrate-mode", &tmp)) { 3586 return OMX_Video_ControlRateVariable; 3587 } 3588 3589 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 3590} 3591 3592status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 3593 int32_t bitrate, iFrameInterval; 3594 if (!msg->findInt32("bitrate", 
&bitrate) 3595 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3596 return INVALID_OPERATION; 3597 } 3598 3599 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3600 3601 float frameRate; 3602 if (!msg->findFloat("frame-rate", &frameRate)) { 3603 int32_t tmp; 3604 if (!msg->findInt32("frame-rate", &tmp)) { 3605 return INVALID_OPERATION; 3606 } 3607 frameRate = (float)tmp; 3608 } 3609 3610 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 3611 InitOMXParams(&mpeg4type); 3612 mpeg4type.nPortIndex = kPortIndexOutput; 3613 3614 status_t err = mOMX->getParameter( 3615 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3616 3617 if (err != OK) { 3618 return err; 3619 } 3620 3621 mpeg4type.nSliceHeaderSpacing = 0; 3622 mpeg4type.bSVH = OMX_FALSE; 3623 mpeg4type.bGov = OMX_FALSE; 3624 3625 mpeg4type.nAllowedPictureTypes = 3626 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3627 3628 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3629 if (mpeg4type.nPFrames == 0) { 3630 mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3631 } 3632 mpeg4type.nBFrames = 0; 3633 mpeg4type.nIDCVLCThreshold = 0; 3634 mpeg4type.bACPred = OMX_TRUE; 3635 mpeg4type.nMaxPacketSize = 256; 3636 mpeg4type.nTimeIncRes = 1000; 3637 mpeg4type.nHeaderExtension = 0; 3638 mpeg4type.bReversibleVLC = OMX_FALSE; 3639 3640 int32_t profile; 3641 if (msg->findInt32("profile", &profile)) { 3642 int32_t level; 3643 if (!msg->findInt32("level", &level)) { 3644 return INVALID_OPERATION; 3645 } 3646 3647 err = verifySupportForProfileAndLevel(profile, level); 3648 3649 if (err != OK) { 3650 return err; 3651 } 3652 3653 mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile); 3654 mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level); 3655 } 3656 3657 err = mOMX->setParameter( 3658 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3659 3660 if (err != OK) { 3661 return err; 3662 } 3663 3664 err = configureBitrate(bitrate, 
bitrateMode); 3665 3666 if (err != OK) { 3667 return err; 3668 } 3669 3670 return setupErrorCorrectionParameters(); 3671} 3672 3673status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) { 3674 int32_t bitrate, iFrameInterval; 3675 if (!msg->findInt32("bitrate", &bitrate) 3676 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3677 return INVALID_OPERATION; 3678 } 3679 3680 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3681 3682 float frameRate; 3683 if (!msg->findFloat("frame-rate", &frameRate)) { 3684 int32_t tmp; 3685 if (!msg->findInt32("frame-rate", &tmp)) { 3686 return INVALID_OPERATION; 3687 } 3688 frameRate = (float)tmp; 3689 } 3690 3691 OMX_VIDEO_PARAM_H263TYPE h263type; 3692 InitOMXParams(&h263type); 3693 h263type.nPortIndex = kPortIndexOutput; 3694 3695 status_t err = mOMX->getParameter( 3696 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 3697 3698 if (err != OK) { 3699 return err; 3700 } 3701 3702 h263type.nAllowedPictureTypes = 3703 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3704 3705 h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3706 if (h263type.nPFrames == 0) { 3707 h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3708 } 3709 h263type.nBFrames = 0; 3710 3711 int32_t profile; 3712 if (msg->findInt32("profile", &profile)) { 3713 int32_t level; 3714 if (!msg->findInt32("level", &level)) { 3715 return INVALID_OPERATION; 3716 } 3717 3718 err = verifySupportForProfileAndLevel(profile, level); 3719 3720 if (err != OK) { 3721 return err; 3722 } 3723 3724 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 3725 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 3726 } 3727 3728 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 3729 h263type.bForceRoundingTypeToZero = OMX_FALSE; 3730 h263type.nPictureHeaderRepetition = 0; 3731 h263type.nGOBHeaderInterval = 0; 3732 3733 err = mOMX->setParameter( 3734 mNode, OMX_IndexParamVideoH263, &h263type, 
sizeof(h263type)); 3735 3736 if (err != OK) { 3737 return err; 3738 } 3739 3740 err = configureBitrate(bitrate, bitrateMode); 3741 3742 if (err != OK) { 3743 return err; 3744 } 3745 3746 return setupErrorCorrectionParameters(); 3747} 3748 3749// static 3750int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor( 3751 int width, int height, int rate, int bitrate, 3752 OMX_VIDEO_AVCPROFILETYPE profile) { 3753 // convert bitrate to main/baseline profile kbps equivalent 3754 switch (profile) { 3755 case OMX_VIDEO_AVCProfileHigh10: 3756 bitrate = divUp(bitrate, 3000); break; 3757 case OMX_VIDEO_AVCProfileHigh: 3758 bitrate = divUp(bitrate, 1250); break; 3759 default: 3760 bitrate = divUp(bitrate, 1000); break; 3761 } 3762 3763 // convert size and rate to MBs 3764 width = divUp(width, 16); 3765 height = divUp(height, 16); 3766 int mbs = width * height; 3767 rate *= mbs; 3768 int maxDimension = max(width, height); 3769 3770 static const int limits[][5] = { 3771 /* MBps MB dim bitrate level */ 3772 { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 }, 3773 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b }, 3774 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 }, 3775 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 }, 3776 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 }, 3777 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 }, 3778 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 }, 3779 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 }, 3780 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 }, 3781 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 3782 { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 3783 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 3784 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 3785 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 3786 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 3787 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 3788 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 3789 }; 3790 3791 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 
3792 const int (&limit)[5] = limits[i]; 3793 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 3794 && bitrate <= limit[3]) { 3795 return limit[4]; 3796 } 3797 } 3798 return 0; 3799} 3800 3801status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 3802 int32_t bitrate, iFrameInterval; 3803 if (!msg->findInt32("bitrate", &bitrate) 3804 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3805 return INVALID_OPERATION; 3806 } 3807 3808 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3809 3810 float frameRate; 3811 if (!msg->findFloat("frame-rate", &frameRate)) { 3812 int32_t tmp; 3813 if (!msg->findInt32("frame-rate", &tmp)) { 3814 return INVALID_OPERATION; 3815 } 3816 frameRate = (float)tmp; 3817 } 3818 3819 status_t err = OK; 3820 int32_t intraRefreshMode = 0; 3821 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 3822 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 3823 if (err != OK) { 3824 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 3825 err, intraRefreshMode); 3826 return err; 3827 } 3828 } 3829 3830 OMX_VIDEO_PARAM_AVCTYPE h264type; 3831 InitOMXParams(&h264type); 3832 h264type.nPortIndex = kPortIndexOutput; 3833 3834 err = mOMX->getParameter( 3835 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 3836 3837 if (err != OK) { 3838 return err; 3839 } 3840 3841 h264type.nAllowedPictureTypes = 3842 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3843 3844 int32_t profile; 3845 if (msg->findInt32("profile", &profile)) { 3846 int32_t level; 3847 if (!msg->findInt32("level", &level)) { 3848 return INVALID_OPERATION; 3849 } 3850 3851 err = verifySupportForProfileAndLevel(profile, level); 3852 3853 if (err != OK) { 3854 return err; 3855 } 3856 3857 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 3858 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 3859 } 3860 3861 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 3862 
h264type.nSliceHeaderSpacing = 0; 3863 h264type.bUseHadamard = OMX_TRUE; 3864 h264type.nRefFrames = 1; 3865 h264type.nBFrames = 0; 3866 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3867 if (h264type.nPFrames == 0) { 3868 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3869 } 3870 h264type.nRefIdx10ActiveMinus1 = 0; 3871 h264type.nRefIdx11ActiveMinus1 = 0; 3872 h264type.bEntropyCodingCABAC = OMX_FALSE; 3873 h264type.bWeightedPPrediction = OMX_FALSE; 3874 h264type.bconstIpred = OMX_FALSE; 3875 h264type.bDirect8x8Inference = OMX_FALSE; 3876 h264type.bDirectSpatialTemporal = OMX_FALSE; 3877 h264type.nCabacInitIdc = 0; 3878 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 3879 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 3880 h264type.nSliceHeaderSpacing = 0; 3881 h264type.bUseHadamard = OMX_TRUE; 3882 h264type.nRefFrames = 2; 3883 h264type.nBFrames = 1; 3884 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3885 h264type.nAllowedPictureTypes = 3886 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 3887 h264type.nRefIdx10ActiveMinus1 = 0; 3888 h264type.nRefIdx11ActiveMinus1 = 0; 3889 h264type.bEntropyCodingCABAC = OMX_TRUE; 3890 h264type.bWeightedPPrediction = OMX_TRUE; 3891 h264type.bconstIpred = OMX_TRUE; 3892 h264type.bDirect8x8Inference = OMX_TRUE; 3893 h264type.bDirectSpatialTemporal = OMX_TRUE; 3894 h264type.nCabacInitIdc = 1; 3895 } 3896 3897 if (h264type.nBFrames != 0) { 3898 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 3899 } 3900 3901 h264type.bEnableUEP = OMX_FALSE; 3902 h264type.bEnableFMO = OMX_FALSE; 3903 h264type.bEnableASO = OMX_FALSE; 3904 h264type.bEnableRS = OMX_FALSE; 3905 h264type.bFrameMBsOnly = OMX_TRUE; 3906 h264type.bMBAFF = OMX_FALSE; 3907 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 3908 3909 err = mOMX->setParameter( 3910 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 3911 3912 if (err != OK) { 3913 return err; 3914 } 
3915 3916 return configureBitrate(bitrate, bitrateMode); 3917} 3918 3919status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 3920 int32_t bitrate, iFrameInterval; 3921 if (!msg->findInt32("bitrate", &bitrate) 3922 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3923 return INVALID_OPERATION; 3924 } 3925 3926 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3927 3928 float frameRate; 3929 if (!msg->findFloat("frame-rate", &frameRate)) { 3930 int32_t tmp; 3931 if (!msg->findInt32("frame-rate", &tmp)) { 3932 return INVALID_OPERATION; 3933 } 3934 frameRate = (float)tmp; 3935 } 3936 3937 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 3938 InitOMXParams(&hevcType); 3939 hevcType.nPortIndex = kPortIndexOutput; 3940 3941 status_t err = OK; 3942 err = mOMX->getParameter( 3943 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 3944 if (err != OK) { 3945 return err; 3946 } 3947 3948 int32_t profile; 3949 if (msg->findInt32("profile", &profile)) { 3950 int32_t level; 3951 if (!msg->findInt32("level", &level)) { 3952 return INVALID_OPERATION; 3953 } 3954 3955 err = verifySupportForProfileAndLevel(profile, level); 3956 if (err != OK) { 3957 return err; 3958 } 3959 3960 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 3961 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 3962 } 3963 // TODO: finer control? 
3964 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 3965 3966 err = mOMX->setParameter( 3967 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 3968 if (err != OK) { 3969 return err; 3970 } 3971 3972 return configureBitrate(bitrate, bitrateMode); 3973} 3974 3975status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 3976 int32_t bitrate; 3977 int32_t iFrameInterval = 0; 3978 size_t tsLayers = 0; 3979 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 3980 OMX_VIDEO_VPXTemporalLayerPatternNone; 3981 static const uint32_t kVp8LayerRateAlloction 3982 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 3983 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 3984 {100, 100, 100}, // 1 layer 3985 { 60, 100, 100}, // 2 layers {60%, 40%} 3986 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 3987 }; 3988 if (!msg->findInt32("bitrate", &bitrate)) { 3989 return INVALID_OPERATION; 3990 } 3991 msg->findInt32("i-frame-interval", &iFrameInterval); 3992 3993 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3994 3995 float frameRate; 3996 if (!msg->findFloat("frame-rate", &frameRate)) { 3997 int32_t tmp; 3998 if (!msg->findInt32("frame-rate", &tmp)) { 3999 return INVALID_OPERATION; 4000 } 4001 frameRate = (float)tmp; 4002 } 4003 4004 AString tsSchema; 4005 if (msg->findString("ts-schema", &tsSchema)) { 4006 if (tsSchema == "webrtc.vp8.1-layer") { 4007 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4008 tsLayers = 1; 4009 } else if (tsSchema == "webrtc.vp8.2-layer") { 4010 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4011 tsLayers = 2; 4012 } else if (tsSchema == "webrtc.vp8.3-layer") { 4013 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4014 tsLayers = 3; 4015 } else { 4016 ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str()); 4017 } 4018 } 4019 4020 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4021 InitOMXParams(&vp8type); 4022 vp8type.nPortIndex = kPortIndexOutput; 4023 status_t err = 
mOMX->getParameter( 4024 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4025 &vp8type, sizeof(vp8type)); 4026 4027 if (err == OK) { 4028 if (iFrameInterval > 0) { 4029 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4030 } 4031 vp8type.eTemporalPattern = pattern; 4032 vp8type.nTemporalLayerCount = tsLayers; 4033 if (tsLayers > 0) { 4034 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4035 vp8type.nTemporalLayerBitrateRatio[i] = 4036 kVp8LayerRateAlloction[tsLayers - 1][i]; 4037 } 4038 } 4039 if (bitrateMode == OMX_Video_ControlRateConstant) { 4040 vp8type.nMinQuantizer = 2; 4041 vp8type.nMaxQuantizer = 63; 4042 } 4043 4044 err = mOMX->setParameter( 4045 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4046 &vp8type, sizeof(vp8type)); 4047 if (err != OK) { 4048 ALOGW("Extended VP8 parameters set failed: %d", err); 4049 } 4050 } 4051 4052 return configureBitrate(bitrate, bitrateMode); 4053} 4054 4055status_t ACodec::verifySupportForProfileAndLevel( 4056 int32_t profile, int32_t level) { 4057 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4058 InitOMXParams(¶ms); 4059 params.nPortIndex = kPortIndexOutput; 4060 4061 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4062 params.nProfileIndex = index; 4063 status_t err = mOMX->getParameter( 4064 mNode, 4065 OMX_IndexParamVideoProfileLevelQuerySupported, 4066 ¶ms, 4067 sizeof(params)); 4068 4069 if (err != OK) { 4070 return err; 4071 } 4072 4073 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4074 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4075 4076 if (profile == supportedProfile && level <= supportedLevel) { 4077 return OK; 4078 } 4079 4080 if (index == kMaxIndicesToCheck) { 4081 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4082 mComponentName.c_str(), index, 4083 params.eProfile, params.eLevel); 4084 } 4085 } 4086 return ERROR_UNSUPPORTED; 4087} 4088 4089status_t ACodec::configureBitrate( 
        int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
    // (body of configureBitrate) read-modify-write the output port's bitrate
    // parameter so unrelated fields keep their component defaults.
    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
    InitOMXParams(&bitrateType);
    bitrateType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));

    if (err != OK) {
        return err;
    }

    bitrateType.eControlRate = bitrateMode;
    bitrateType.nTargetBitrate = bitrate;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));
}

// Enables resync markers (and disables HEC/data partitioning/RVLC) on the
// output port. Error correction is an optional component feature; if the
// parameter cannot even be queried, the failure is silently ignored.
status_t ACodec::setupErrorCorrectionParameters() {
    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
    InitOMXParams(&errorCorrectionType);
    errorCorrectionType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));

    if (err != OK) {
        return OK;  // Optional feature. Ignore this failure
    }

    errorCorrectionType.bEnableHEC = OMX_FALSE;
    errorCorrectionType.bEnableResync = OMX_TRUE;
    errorCorrectionType.nResynchMarkerSpacing = 256;
    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
    errorCorrectionType.bEnableRVLC = OMX_FALSE;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));
}

// Applies frame dimensions, compression format and (input-only) frame rate to
// the given port's definition. frameRate is in frames/sec; pass a negative
// value to leave the port's frame rate untouched.
status_t ACodec::setVideoFormatOnPort(
        OMX_U32 portIndex,
        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
        float frameRate) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (portIndex == kPortIndexInput) {
        // XXX Need a (much) better heuristic to compute input buffer sizes.
        // Enforce a 64 KiB floor on input buffer size (see heuristic note above).
        const size_t X = 64 * 1024;
        if (def.nBufferSize < X) {
            def.nBufferSize = X;
        }
    }

    if (def.eDomain != OMX_PortDomainVideo) {
        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
        return FAILED_TRANSACTION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    if (portIndex == kPortIndexInput) {
        video_def->eCompressionFormat = compressionFormat;
        video_def->eColorFormat = OMX_COLOR_FormatUnused;
        if (frameRate >= 0) {
            // OMX frame rate is in Q16 fixed point.
            video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
        }
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    return err;
}

// Opts the output port in (surface present) or out (no surface) of native
// graphic buffers. The opt-out result is deliberately ignored.
status_t ACodec::initNativeWindow() {
    if (mNativeWindow != NULL) {
        return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
    }

    mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
    return OK;
}

// Counts buffers on the given port currently held by the OMX component.
size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            ++n;
        }
    }

    return n;
}

// Counts output buffers currently queued/owned by the native window.
size_t ACodec::countBuffersOwnedByNativeWindow() const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ++n;
        }
    }

    return n;
}

// Dequeues buffers back from the native window until only the minimum
// undequeued count remains (or dequeueing fails).
void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
    if (mNativeWindow == NULL) {
        return;
    }

    while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
            && dequeueBufferFromNativeWindow() != NULL) {
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}

// Returns true if every buffer on the port is owned by us or by the native
// window (i.e. nothing is still held by the component or downstream).
bool ACodec::allYourBuffersAreBelongToUs(
        OMX_U32 portIndex) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mStatus != BufferInfo::OWNED_BY_US
                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("[%s] Buffer %u on port %u still has status %d",
                    mComponentName.c_str(),
                    info->mBufferID, portIndex, info->mStatus);
            return false;
        }
    }

    return true;
}

// Convenience overload: checks both ports.
bool ACodec::allYourBuffersAreBelongToUs() {
    return allYourBuffersAreBelongToUs(kPortIndexInput)
        && allYourBuffersAreBelongToUs(kPortIndexOutput);
}

// Queues a message for redelivery once the current state transition finishes.
void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}

// Redelivers all deferred messages in order. The queue is swapped out first
// so handlers may defer messages again without corrupting the iteration.
void ACodec::processDeferredMessages() {
    List<sp<AMessage> > queue = mDeferredQueue;
    mDeferredQueue.clear();

    List<sp<AMessage> >::iterator it = queue.begin();
    while (it != queue.end()) {
        onMessageReceived(*it++);
    }
}

// static
// Fallback MediaImage2 description for components that do not implement the
// describeColorFormat extension. Only the common YUV420 variants (and YV12 on
// native buffers) are supported; returns false for anything else.
bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // gralloc YV12: 16-aligned strides, V plane before U plane.
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            TRESPASS();
    }
    return true;
}

// static
// Queries the component's color format description, preferring the V1
// extension, then the V2 extension, then the built-in default description.
bool ACodec::describeColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        DescribeColorFormat2Params &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat",
            &describeColorFormatIndex) == OK) {
        DescribeColorFormatParams describeParamsV1(describeParams);
        if (omx->getParameter(
                node, describeColorFormatIndex,
                &describeParamsV1, sizeof(describeParamsV1)) == OK) {
            describeParams.initFromV1(describeParamsV1);
            return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        }
    } else if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
            && omx->getParameter(
            node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
        return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    }

    return describeDefaultColorFormat(describeParams);
}

// static
// Returns true (and sets *flexibleEquivalent) if the given color format can
// be treated as a flexible YUV420 format (3 planes, 8-bit, 4:2:0 sampling).
bool ACodec::isFlexibleColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
    DescribeColorFormat2Params describeParams;
    InitOMXParams(&describeParams);
    describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
    // reasonable dummy values
    describeParams.nFrameWidth = 128;
    describeParams.nFrameHeight = 128;
    describeParams.nStride = 128;
    describeParams.nSliceHeight = 128;
    describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;

    CHECK(flexibleEquivalent != NULL);

    if (!describeColorFormat(omx, node, describeParams)) {
        return false;
    }

    const MediaImage2 &img = describeParams.sMediaImage;
    if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
        if (img.mNumPlanes != 3
                || img.mPlane[img.Y].mHorizSubsampling != 1
                || img.mPlane[img.Y].mVertSubsampling != 1) {
            return false;
        }

        // YUV 420
        if (img.mPlane[img.U].mHorizSubsampling == 2
                && img.mPlane[img.U].mVertSubsampling == 2
                && img.mPlane[img.V].mHorizSubsampling == 2
                &&
                img.mPlane[img.V].mVertSubsampling == 2) {
            // possible flexible YUV420 format
            if (img.mBitDepth <= 8) {
               *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
               return true;
            }
        }
    }
    return false;
}

// Populates |notify| with the format of the given port (mime, dimensions,
// color/audio parameters) read back from the component. Returns BAD_VALUE or
// BAD_TYPE when the component reports an inconsistent or unsupported
// configuration.
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
    const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) {
        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
        return BAD_VALUE;
    }

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // Raw video: decoder output or encoder input only.
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    if (mNativeWindow == NULL) {
                        // Client gets raw buffers directly: attach the plane
                        // layout so it can interpret them.
                        DescribeColorFormat2Params describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (describeColorFormat(mOMX, mNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        // Fall back to the full frame if the component does
                        // not report a crop rectangle.
                        if (mOMX->getConfig(
                                    mNode,
                                    (portIndex == kPortIndexOutput ?
                                            OMX_IndexConfigCommonOutputCrop :
                                            OMX_IndexConfigCommonInputCrop),
                                    &rect, sizeof(rect)) != OK) {
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        // Sanity-check the crop against the frame bounds.
                        if (rect.nLeft < 0 ||
                            rect.nTop < 0 ||
                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
                                    rect.nLeft, rect.nTop,
                                    rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        // "crop" uses inclusive right/bottom coordinates.
                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                    } else {
                        (void)getInputColorAspectsForVideoEncoder(notify);
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    // Report the active temporal-layering schema, if any.
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMX->getParameter(
                            mNode,
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        AString tsSchema = "none";
                        if (vp8type.eTemporalPattern
                                == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) {
                            switch (vp8type.nTemporalLayerCount) {
                                case 1:
                                {
                                    tsSchema = "webrtc.vp8.1-layer";
                                    break;
                                }
                                case 2:
                                {
                                    tsSchema = "webrtc.vp8.2-layer";
                                    break;
                                }
                                case 3:
                                {
                                    tsSchema = "webrtc.vp8.3-layer";
                                    break;
                                }
                                default:
                                {
                                    break;
                                }
                            }
                        }
                        notify->setString("ts-schema", tsSchema);
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
                        // should be CodingUnused
                        ALOGE("Raw port video compression format is %s(%d)",
                                asString(videoDef->eCompressionFormat),
                                videoDef->eCompressionFormat);
                        return BAD_VALUE;
                    }
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                            videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    uint32_t intraRefreshPeriod = 0;
                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
                            && intraRefreshPeriod > 0) {
                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // Only 16-bit signed interleaved linear PCM is supported.
                    if (params.nChannels <= 0
                            || (params.nChannels != 1 && !params.bInterleaved)
                            || params.nBitPerSample != 16u
                            || params.eNumData != OMX_NumericalDataSigned
                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
                        ALOGE("unsupported PCM port: %u channels%s, %u-bit, %s(%d), %s(%d) mode ",
                                params.nChannels,
                                params.bInterleaved ? " interleaved" : "",
                                params.nBitPerSample,
                                asString(params.eNumData), params.eNumData,
                                asString(params.ePCMMode), params.ePCMMode);
                        return FAILED_TRANSACTION;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);
                    notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit);

                    if (mChannelMaskPresent) {
                        notify->setInt32("channel-mask", mChannelMask);
                    }
                    break;
                }

                case OMX_AUDIO_CodingAAC:
                {
                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAMR:
                {
                    OMX_AUDIO_PARAM_AMRTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // AMR is mono; the band mode distinguishes NB (8 kHz)
                    // from WB (16 kHz).
                    notify->setInt32("channel-count", 1);
                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
                        notify->setInt32("sample-rate", 16000);
                    } else {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
                        notify->setInt32("sample-rate", 8000);
                    }
                    break;
                }

                case OMX_AUDIO_CodingFLAC:
                {
                    OMX_AUDIO_PARAM_FLACTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingMP3:
                {
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingVORBIS:
                {
                    OMX_AUDIO_PARAM_VORBISTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidEAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidOPUS:
                {
                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingG711:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // G.711 decoders output linear PCM; the mode tells us
                    // whether the input was mu-law, A-law, or already linear.
                    const char *mime = NULL;
                    if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
                    } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
                    } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
                        mime = MEDIA_MIMETYPE_AUDIO_RAW;
                    }
                    notify->setString("mime", mime);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);
                    notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit);
                    break;
                }

                case OMX_AUDIO_CodingGSMFR:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMX->getParameter(
                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM);
                    notify->setInt32("channel-count",
params.nChannels); 4873 notify->setInt32("sample-rate", params.nSamplingRate); 4874 break; 4875 } 4876 4877 default: 4878 ALOGE("Unsupported audio coding: %s(%d)\n", 4879 asString(audioDef->eEncoding), audioDef->eEncoding); 4880 return BAD_TYPE; 4881 } 4882 break; 4883 } 4884 4885 default: 4886 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 4887 return BAD_TYPE; 4888 } 4889 4890 return OK; 4891} 4892 4893void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 4894 // aspects are normally communicated in ColorAspects 4895 int32_t range, standard, transfer; 4896 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 4897 4898 // if some aspects are unspecified, use dataspace fields 4899 if (range != 0) { 4900 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 4901 } 4902 if (standard != 0) { 4903 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 4904 } 4905 if (transfer != 0) { 4906 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 4907 } 4908 4909 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 4910 if (range != 0) { 4911 mOutputFormat->setInt32("color-range", range); 4912 } 4913 if (standard != 0) { 4914 mOutputFormat->setInt32("color-standard", standard); 4915 } 4916 if (transfer != 0) { 4917 mOutputFormat->setInt32("color-transfer", transfer); 4918 } 4919 4920 ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 4921 "(R:%d(%s), S:%d(%s), T:%d(%s))", 4922 dataSpace, 4923 aspects.mRange, asString(aspects.mRange), 4924 aspects.mPrimaries, asString(aspects.mPrimaries), 4925 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 4926 aspects.mTransfer, asString(aspects.mTransfer), 4927 range, asString((ColorRange)range), 4928 standard, asString((ColorStandard)standard), 4929 transfer, asString((ColorTransfer)transfer)); 4930} 
// Re-reads the output port format after the component signalled a change and,
// in tunneled mode (where no buffers will flow to carry it), sends the format
// change to the client immediately.
void ACodec::onOutputFormatChanged() {
    // store new output format, at the same time mark that this is no longer the first frame
    mOutputFormat = mBaseOutputFormat->dup();

    if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) {
        ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str());
        return;
    }

    if (mTunneled) {
        sendFormatChange();
    }
}

// Copies crop and dataspace from the current output format into a
// render-buffer notification so the renderer can apply them.
void ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
        // notify renderer of the crop change and dataspace change
        // NOTE: native window uses extended right-bottom coordinate
        int32_t left, top, right, bottom;
        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
            notify->setRect("crop", left, top, right + 1, bottom + 1);
        }

        int32_t dataSpace;
        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
            notify->setInt32("dataspace", dataSpace);
        }
    }
}

// Posts kWhatOutputFormatChanged with the current output format. For raw
// audio with encoder delay/padding, (re)creates the SkipCutBuffer that trims
// those samples.
void ACodec::sendFormatChange() {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) {
        int32_t channelCount;
        CHECK(mOutputFormat->findInt32("channel-count", &channelCount));
        if (mSkipCutBuffer != NULL) {
            size_t prevbufsize = mSkipCutBuffer->size();
            if (prevbufsize != 0) {
                // Replacing a non-empty buffer drops the samples it held.
                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
            }
        }
        mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatOutputFormatChanged);
    notify->setMessage("format", mOutputFormat);
    notify->post();

    // mLastOutputFormat is not used when tunneled; doing this just to stay consistent
    mLastOutputFormat = mOutputFormat;
}

// Reports a fatal error to the client. |error| is the raw OMX error;
// |internalError| is the status_t surfaced to the app (UNKNOWN_ERROR is
// replaced by a translation of the OMX error when possible).
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatError);
    ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);

    if (internalError == UNKNOWN_ERROR) { // find better error code
        const status_t omxStatus = statusFromOMXError(error);
        if (omxStatus != 0) {
            internalError = omxStatus;
        } else {
            ALOGW("Invalid OMX error %#x", error);
        }
    }

    mFatalError = true;

    notify->setInt32("err", internalError);
    notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
    notify->post();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::PortDescription::PortDescription() {
}

// Asks the encoder to emit an IDR (sync) frame as soon as possible.
status_t ACodec::requestIDRFrame() {
    if (!mIsEncoder) {
        return ERROR_UNSUPPORTED;
    }

    OMX_CONFIG_INTRAREFRESHVOPTYPE params;
    InitOMXParams(&params);

    params.nPortIndex = kPortIndexOutput;
    params.IntraRefreshVOP = OMX_TRUE;

    return mOMX->setConfig(
            mNode,
            OMX_IndexConfigVideoIntraVOPRefresh,
            &params,
            sizeof(params));
}

// Records one buffer (id, data, native handle, backing memory) in the
// parallel arrays of this port description.
void ACodec::PortDescription::addBuffer(
        IOMX::buffer_id id, const sp<ABuffer> &buffer,
        const sp<NativeHandle> &handle, const sp<RefBase> &memRef) {
    mBufferIDs.push_back(id);
    mBuffers.push_back(buffer);
    mHandles.push_back(handle);
    mMemRefs.push_back(memRef);
}

size_t ACodec::PortDescription::countBuffers() {
    return mBufferIDs.size();
}

IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
    return mBufferIDs.itemAt(index);
}

sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
    return mBuffers.itemAt(index);
}

sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const {
    return mHandles.itemAt(index);
}

sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const {
    return mMemRefs.itemAt(index);
}

////////////////////////////////////////////////////////////////////////////////

ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
    : AState(parentState),
      mCodec(codec) {
}

// Default port mode: hold on to buffers until a state decides otherwise.
ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
        OMX_U32 /* portIndex */) {
    return KEEP_BUFFERS;
}

// Common message dispatch shared by all ACodec states; returns false for
// messages the concrete state must handle itself.
bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatInputBufferFilled:
        {
            onInputBufferFilled(msg);
            break;
        }

        case kWhatOutputBufferDrained:
        {
            onOutputBufferDrained(msg);
            break;
        }

        case ACodec::kWhatOMXMessageList:
        {
            return checkOMXMessage(msg) ? onOMXMessageList(msg) : true;
        }

        case ACodec::kWhatOMXMessageItem:
        {
            // no need to check as we already did it for kWhatOMXMessageList
            return onOMXMessage(msg);
        }

        case ACodec::kWhatOMXMessage:
        {
            return checkOMXMessage(msg) ? onOMXMessage(msg) : true;
        }

        case ACodec::kWhatSetSurface:
        {
            sp<AReplyToken> replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            sp<RefBase> obj;
            CHECK(msg->findObject("surface", &obj));

            status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get()));

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);
            response->postReply(replyID);
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        case ACodec::kWhatSetInputSurface:
        case ACodec::kWhatSignalEndOfInputStream:
        {
            // This may result in an app illegal state exception.
5126 ALOGE("Message 0x%x was not handled", msg->what()); 5127 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5128 return true; 5129 } 5130 5131 case ACodec::kWhatOMXDied: 5132 { 5133 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5134 ALOGE("OMX/mediaserver died, signalling error!"); 5135 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5136 break; 5137 } 5138 5139 case ACodec::kWhatReleaseCodecInstance: 5140 { 5141 ALOGI("[%s] forcing the release of codec", 5142 mCodec->mComponentName.c_str()); 5143 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5144 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5145 mCodec->mComponentName.c_str(), err); 5146 sp<AMessage> notify = mCodec->mNotify->dup(); 5147 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5148 notify->post(); 5149 break; 5150 } 5151 5152 default: 5153 return false; 5154 } 5155 5156 return true; 5157} 5158 5159bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5160 // there is a possibility that this is an outstanding message for a 5161 // codec that we have already destroyed 5162 if (mCodec->mNode == 0) { 5163 ALOGI("ignoring message as already freed component: %s", 5164 msg->debugString().c_str()); 5165 return false; 5166 } 5167 5168 IOMX::node_id nodeID; 5169 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5170 if (nodeID != mCodec->mNode) { 5171 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5172 return false; 5173 } 5174 return true; 5175} 5176 5177bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5178 sp<RefBase> obj; 5179 CHECK(msg->findObject("messages", &obj)); 5180 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5181 5182 bool receivedRenderedEvents = false; 5183 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5184 it != msgList->getList().cend(); ++it) { 5185 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 
        mCodec->handleMessage(*it);
        int32_t type;
        CHECK((*it)->findInt32("type", &type));
        if (type == omx_message::FRAME_RENDERED) {
            receivedRenderedEvents = true;
        }
    }

    if (receivedRenderedEvents) {
        // NOTE: all buffers are rendered in this case
        mCodec->notifyOfRenderedFrames();
    }
    return true;
}

// Demultiplex a single OMX callback message ("type" selects event vs.
// empty/fill-buffer-done vs. frame-rendered) to the matching handler.
bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));

    switch (type) {
        case omx_message::EVENT:
        {
            int32_t event, data1, data2;
            CHECK(msg->findInt32("event", &event));
            CHECK(msg->findInt32("data1", &data1));
            CHECK(msg->findInt32("data2", &data2));

            if (event == OMX_EventCmdComplete
                    && data1 == OMX_CommandFlush
                    && data2 == (int32_t)OMX_ALL) {
                // Use of this notification is not consistent across
                // implementations. We'll drop this notification and rely
                // on flush-complete notifications on the individual port
                // indices instead.
                return true;
            }

            return onOMXEvent(
                    static_cast<OMX_EVENTTYPE>(event),
                    static_cast<OMX_U32>(data1),
                    static_cast<OMX_U32>(data2));
        }

        case omx_message::EMPTY_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            int32_t fenceFd;

            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXEmptyBufferDone(bufferID, fenceFd);
        }

        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));

            int32_t rangeOffset, rangeLength, flags, fenceFd;
            int64_t timeUs;

            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    fenceFd);
        }

        case omx_message::FRAME_RENDERED:
        {
            int64_t mediaTimeUs, systemNano;

            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
            CHECK(msg->findInt64("system_nano", &systemNano));

            return onOMXFrameRendered(
                    mediaTimeUs, systemNano);
        }

        default:
            ALOGE("Unexpected message type: %d", type);
            return false;
    }
}

bool ACodec::BaseState::onOMXFrameRendered(
        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
    // ignore outside of Executing and PortSettingsChanged states
    return true;
}

// Handle a generic OMX event. Data-space changes are decoded (the color
// aspects are bit-packed into data2: range<<24 | primaries<<16 |
// matrix<<8 | transfer) and forwarded; only OMX_EventError is otherwise
// handled here — everything else is left for derived states (returns false).
bool ACodec::BaseState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    if (event == OMX_EventDataSpaceChanged) {
        ColorAspects aspects;
        aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
        aspects.mPrimaries =
                (ColorAspects::Primaries)((data2 >> 16) & 0xFF);
        aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF);
        aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF);

        mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
        return true;
    }

    if (event != OMX_EventError) {
        ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
                mCodec->mComponentName.c_str(), event, data1, data2);

        return false;
    }

    ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);

    // verify OMX component sends back an error we expect.
    OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
    if (!isOMXError(omxError)) {
        ALOGW("Invalid OMX error %#x", omxError);
        omxError = OMX_ErrorUndefined;
    }
    mCodec->signalError(omxError);

    return true;
}

// The component has consumed an input buffer: reclaim ownership, wait out
// any fence (input buffers can't carry fences downstream), release the
// buffer's refcounted media data, and — in RESUBMIT mode — offer the buffer
// back to the client for refilling.
bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
    ALOGV("[%s] onOMXEmptyBufferDone %u",
            mCodec->mComponentName.c_str(), bufferID);

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        // ownership bookkeeping is inconsistent; close the fence so the fd
        // doesn't leak, but don't touch the buffer
        ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return false;
    }
    info->mStatus = BufferInfo::OWNED_BY_US;

    // input buffers cannot take fences, so wait for any fence now
    (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
    fenceFd = -1;

    // still save fence for completeness
    info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");

    // We're in "store-metadata-in-buffers" mode, the underlying
    // OMX component had access to data that's implicitly refcounted
    // by this "MediaBuffer" object. Now that the OMX component has
    // told us that it's done with the input buffer, we can decrement
    // the mediaBuffer's reference count.
    info->mData->setMediaBufferBase(NULL);

    PortMode mode = getPortMode(kPortIndexInput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
            postFillThisBuffer(info);
            break;

        case FREE_BUFFERS:
        default:
            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
            return false;
    }

    return true;
}

// Hand an (empty) input buffer to the client to be filled; ownership moves
// to OWNED_BY_UPSTREAM. No-op once input EOS has been seen.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
    notify->setInt32("buffer-id", info->mBufferID);

    info->mData->meta()->clear();
    notify->setBuffer("buffer", info->mData);

    // client posts this reply (kWhatInputBufferFilled) when done filling
    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
    reply->setInt32("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

// The client returned an input buffer: either filled with data (possibly
// carrying EOS / codec-config flags in its meta) or unfilled with an error
// code on flush/EOS. Reclaims ownership and, in RESUBMIT mode, forwards the
// data to the OMX component via emptyBuffer().
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<ABuffer> buffer;
    int32_t err = OK;
    bool eos = false;
    PortMode mode = getPortMode(kPortIndexInput);

    if (!msg->findBuffer("buffer", &buffer)) {
        /* these are unfilled buffers returned by client */
        CHECK(msg->findInt32("err", &err));

        if (err == OK) {
            /* buffers with no errors are returned on MediaCodec.flush */
            mode = KEEP_BUFFERS;
        } else {
            ALOGV("[%s] saw error %d instead of an input buffer",
                    mCodec->mComponentName.c_str(), err);
            eos = true;
        }

        buffer.clear();
    }

    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // hold the buffer; just latch EOS state if signalled
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                int32_t isCSD;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                // the client may have filled a buffer other than the one we
                // handed out (e.g. its own); copy into the component's buffer
                if (buffer != info->mData) {
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                            mCodec->mComponentName.c_str(),
                            bufferID,
                            buffer.get(), info->mData.get());

                    if (buffer->size() > info->mData->capacity()) {
                        ALOGE("data size (%zu) is greated than buffer capacity (%zu)",
                                buffer->size(),           // this is the data received
                                info->mData->capacity()); // this is out buffer size
                        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                        return;
                    }
                    memcpy(info->mData->data(), buffer->data(), buffer->size());
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                            mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                            mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                            mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ?
                             "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");
                // hand the (filled) buffer to the component; the fence fd is
                // transferred, so clear our copy afterwards
                status_t err2 = mCodec->mOMX->emptyBuffer(
                    mCodec->mNode,
                    bufferID,
                    0,
                    buffer->size(),
                    flags,
                    timeUs,
                    info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                            mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // no data buffer came back — signal EOS to the component with
                // an empty buffer carrying the EOS flag
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                            mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                            mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                        mCodec->mComponentName.c_str(), bufferID);

                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        0,
                        OMX_BUFFERFLAG_EOS,
                        0,
                        info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// If any input buffer is currently owned by us, offer (the last) one to the
// client for filling. No-op after input EOS.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if
 (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            // keeps scanning: the LAST buffer owned by us wins
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

// The component produced an output buffer (rangeOffset/rangeLength describe
// the valid data, fenceFd gates read access). Reclaims ownership, then acts
// per port mode: resubmit empty non-EOS buffers directly, or forward data
// to the client as kWhatDrainThisBuffer.
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
            mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;
    status_t err= OK;

#if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        // inconsistent ownership: report, close the fence fd to avoid a leak
        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return true;
    }

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus =
 BufferInfo::OWNED_BY_US;

    if (info->mRenderInfo != NULL) {
        // The fence for an emptied buffer must have signaled, but there still could be queued
        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
        // as we will soon requeue this buffer to the surface. While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // an empty buffer without EOS (or after output EOS) carries no
            // payload for the client — hand it straight back to the component
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                        mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged();
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                // encoder-output metadata mode: the buffer payload is a small
                // metadata struct referencing a gralloc/ANW buffer; extract
                // the native handle and attach it via buffer meta instead of
                // setting a data range
                native_handle_t *handle = NULL;
                VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)info->mData->data();
                VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)info->mData->data();
                if (info->mData->size() >= sizeof(grallocMeta)
                        && grallocMeta.eType == kMetadataBufferTypeGrallocSource) {
                    handle = (native_handle_t *)(uintptr_t)grallocMeta.pHandle;
                } else if (info->mData->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeANWBuffer) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // ANativeWindowBuffer is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pBuffer->handle;
#endif
                }
                info->mData->meta()->setPointer("handle", handle);
                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                info->mData->meta()->setInt32("rangeLength", rangeLength);
            } else {
                info->mData->setRange(rangeOffset, rangeLength);
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                mCodec->mSkipCutBuffer->submit(info->mData);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);

            // hand the buffer to the client; it comes back to us via the
            // kWhatOutputBufferDrained reply prepared above
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setBuffer("buffer", info->mData);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                sp<AMessage> notify =
mCodec->mNotify->dup(); 5764 notify->setInt32("what", CodecBase::kWhatEOS); 5765 notify->setInt32("err", mCodec->mInputEOSResult); 5766 notify->post(); 5767 5768 mCodec->mPortEOS[kPortIndexOutput] = true; 5769 } 5770 break; 5771 } 5772 5773 case FREE_BUFFERS: 5774 err = mCodec->freeBuffer(kPortIndexOutput, index); 5775 if (err != OK) { 5776 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5777 return true; 5778 } 5779 break; 5780 5781 default: 5782 ALOGE("Invalid port mode: %d", mode); 5783 return false; 5784 } 5785 5786 return true; 5787} 5788 5789void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { 5790 IOMX::buffer_id bufferID; 5791 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5792 ssize_t index; 5793 BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5794 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5795 if (status != BufferInfo::OWNED_BY_DOWNSTREAM) { 5796 ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5797 mCodec->dumpBuffers(kPortIndexOutput); 5798 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5799 return; 5800 } 5801 5802 android_native_rect_t crop; 5803 if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) { 5804 status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop); 5805 ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err); 5806 } 5807 5808 int32_t dataSpace; 5809 if (msg->findInt32("dataspace", &dataSpace)) { 5810 status_t err = native_window_set_buffers_data_space( 5811 mCodec->mNativeWindow.get(), (android_dataspace)dataSpace); 5812 ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err); 5813 } 5814 5815 int32_t render; 5816 if (mCodec->mNativeWindow != NULL 5817 && msg->findInt32("render", &render) && render != 0 5818 && info->mData != NULL && info->mData->size() != 0) { 5819 ATRACE_NAME("render"); 5820 // The client wants this buffer to be 
rendered. 5821 5822 // save buffers sent to the surface so we can get render time when they return 5823 int64_t mediaTimeUs = -1; 5824 info->mData->meta()->findInt64("timeUs", &mediaTimeUs); 5825 if (mediaTimeUs >= 0) { 5826 mCodec->mRenderTracker.onFrameQueued( 5827 mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd))); 5828 } 5829 5830 int64_t timestampNs = 0; 5831 if (!msg->findInt64("timestampNs", ×tampNs)) { 5832 // use media timestamp if client did not request a specific render timestamp 5833 if (info->mData->meta()->findInt64("timeUs", ×tampNs)) { 5834 ALOGV("using buffer PTS of %lld", (long long)timestampNs); 5835 timestampNs *= 1000; 5836 } 5837 } 5838 5839 status_t err; 5840 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); 5841 ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err); 5842 5843 info->checkReadFence("onOutputBufferDrained before queueBuffer"); 5844 err = mCodec->mNativeWindow->queueBuffer( 5845 mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 5846 info->mFenceFd = -1; 5847 if (err == OK) { 5848 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 5849 } else { 5850 ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err); 5851 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5852 info->mStatus = BufferInfo::OWNED_BY_US; 5853 // keeping read fence as write fence to avoid clobbering 5854 info->mIsReadFence = false; 5855 } 5856 } else { 5857 if (mCodec->mNativeWindow != NULL && 5858 (info->mData == NULL || info->mData->size() != 0)) { 5859 // move read fence into write fence to avoid clobbering 5860 info->mIsReadFence = false; 5861 ATRACE_NAME("frame-drop"); 5862 } 5863 info->mStatus = BufferInfo::OWNED_BY_US; 5864 } 5865 5866 PortMode mode = getPortMode(kPortIndexOutput); 5867 5868 switch (mode) { 5869 case KEEP_BUFFERS: 5870 { 5871 // XXX fishy, revisit!!! What about the FREE_BUFFERS case below? 
            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                // dequeueBufferFromNativeWindow may fail — only resubmit
                // when we actually hold a buffer
                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                            mCodec->mComponentName.c_str(), info->mBufferID);
                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                    status_t err = mCodec->mOMX->fillBuffer(
                            mCodec->mNode, info->mBufferID, info->mFenceFd);
                    info->mFenceFd = -1;
                    if (err == OK) {
                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                    } else {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                }
            }
            break;
        }

        case FREE_BUFFERS:
        {
            status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            }
            break;
        }

        default:
            ALOGE("Invalid port mode: %d", mode);
            return;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}

// Reset all component-related state when (re)entering Uninitialized: drop the
// death notifier, release the native window and the OMX node/interface.
void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    if (mDeathNotifier != NULL) {
        mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier);
        mDeathNotifier.clear();
    }

    mCodec->mUsingNativeWindow = false;
    mCodec->mNativeWindow.clear();
    mCodec->mNativeWindowUsageBits = 0;
    mCodec->mNode = 0;
    mCodec->mOMX.clear();
    mCodec->mQuirks = 0;
    mCodec->mFlags = 0;
    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mComponentName.clear();
}

// In Uninitialized there is no component yet: setup/allocate start the life
// cycle; shutdown and flush complete trivially (nothing to tear down).
bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }

        case ACodec::kWhatAllocateComponent:
        {
            onAllocateComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));
            ALOGW_IF(keepComponentAllocated,
                     "cannot keep component allocated on shutdown in Uninitialized state");

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatReleaseCodecInstance:
        {
            // nothing to do, as we have already signaled shutdown
            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// One-shot convenience path: allocate + configure + start in sequence,
// stopping at the first step that fails.
void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    if (onAllocateComponent(msg)
            && mCodec->mLoadedState->onConfigureComponent(msg)) {
        mCodec->mLoadedState->onStart();
    }
}

// Instantiate an OMX component, either by explicit component name or by
// matching a mime type (+encoder flag) against the codec list. On success
// transitions to Loaded; on failure signals an error and stays put.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mNode == 0);

    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    sp<IOMX> omx = client.interface();

    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

    Vector<AString> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        // explicit component requested — use it if the codec list knows it
        sp<IMediaCodecList> list = MediaCodecList::getInstance();
        if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
            matchingCodecs.add(componentName);
        }
    } else {
        // otherwise match by mime type (and encoder/decoder direction)
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        MediaCodecList::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                0,       // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    // try candidates in order until one allocates successfully
    status_t err = NAME_NOT_FOUND;
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs[matchIndex];
        quirks = MediaCodecList::getQuirksFor(componentName.c_str());

        // temporarily boost this thread so component allocation isn't starved
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        node = 0;
    }

    if (node == 0) {
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                    encoder ?
 "en" : "de", mime.c_str(), err);
        } else {
            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
        }

        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
        return false;
    }

    mDeathNotifier = new DeathNotifier(notify);
    if (mCodec->mNodeBinder == NULL ||
            mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    notify = new AMessage(kWhatOMXMessageList, mCodec);
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mRenderTracker.setComponentName(componentName);
    mCodec->mFlags = 0;

    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    mCodec->mQuirks = quirks;
    mCodec->mOMX = omx;
    mCodec->mNode = node;

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedState::LoadedState(ACodec *codec)
    : BaseState(codec) {
}

// Reset per-session state on entering Loaded; if a shutdown was requested
// while transitioning here, complete it now.
void ACodec::LoadedState::stateEntered() {
    ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetadataBuffersToSubmit = 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();

    if (mCodec->mShutdownInProgress) {
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;

    mCodec->processDeferredMessages();
}

// Shut down from Loaded: free the OMX node (unless the caller wants to keep
// the component for reuse) and acknowledge an explicit shutdown request.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        (void)mCodec->mOMX->freeNode(mCodec->mNode);

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

// Loaded-state dispatch: configure/start/input-surface requests are valid
// here; flush completes trivially since nothing is running yet.
bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatSetInputSurface:
        {
            onSetInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
notify->post(); 6230 6231 handled = true; 6232 break; 6233 } 6234 6235 default: 6236 return BaseState::onMessageReceived(msg); 6237 } 6238 6239 return handled; 6240} 6241 6242bool ACodec::LoadedState::onConfigureComponent( 6243 const sp<AMessage> &msg) { 6244 ALOGV("onConfigureComponent"); 6245 6246 CHECK(mCodec->mNode != 0); 6247 6248 status_t err = OK; 6249 AString mime; 6250 if (!msg->findString("mime", &mime)) { 6251 err = BAD_VALUE; 6252 } else { 6253 err = mCodec->configureCodec(mime.c_str(), msg); 6254 } 6255 if (err != OK) { 6256 ALOGE("[%s] configureCodec returning error %d", 6257 mCodec->mComponentName.c_str(), err); 6258 6259 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6260 return false; 6261 } 6262 6263 { 6264 sp<AMessage> notify = mCodec->mNotify->dup(); 6265 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 6266 notify->setMessage("input-format", mCodec->mInputFormat); 6267 notify->setMessage("output-format", mCodec->mOutputFormat); 6268 notify->post(); 6269 } 6270 6271 return true; 6272} 6273 6274status_t ACodec::LoadedState::setupInputSurface() { 6275 status_t err = OK; 6276 6277 if (mCodec->mRepeatFrameDelayUs > 0ll) { 6278 err = mCodec->mOMX->setInternalOption( 6279 mCodec->mNode, 6280 kPortIndexInput, 6281 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 6282 &mCodec->mRepeatFrameDelayUs, 6283 sizeof(mCodec->mRepeatFrameDelayUs)); 6284 6285 if (err != OK) { 6286 ALOGE("[%s] Unable to configure option to repeat previous " 6287 "frames (err %d)", 6288 mCodec->mComponentName.c_str(), 6289 err); 6290 return err; 6291 } 6292 } 6293 6294 if (mCodec->mMaxPtsGapUs > 0ll) { 6295 err = mCodec->mOMX->setInternalOption( 6296 mCodec->mNode, 6297 kPortIndexInput, 6298 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 6299 &mCodec->mMaxPtsGapUs, 6300 sizeof(mCodec->mMaxPtsGapUs)); 6301 6302 if (err != OK) { 6303 ALOGE("[%s] Unable to configure max timestamp gap (err %d)", 6304 mCodec->mComponentName.c_str(), 6305 err); 6306 
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_FPS,
                &mCodec->mMaxFps,
                sizeof(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                    mCodec->mComponentName.c_str(),
                    err);
            return err;
        }
    }

    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        // Time lapse / slow motion: element 0 is the playback frame duration,
        // element 1 the capture interval.
        int64_t timeLapse[2];
        timeLapse[0] = mCodec->mTimePerFrameUs;
        timeLapse[1] = mCodec->mTimePerCaptureUs;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_TIME_LAPSE,
                &timeLapse[0],
                sizeof(timeLapse));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                    mCodec->mComponentName.c_str(),
                    err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        bool suspend = true;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_SUSPEND,
                &suspend,
                sizeof(suspend));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    // Best effort: expose the consumer usage bits so clients can tell whether
    // the surface buffers will be read by software often. Failure is ignored.
    uint32_t usageBits;
    if (mCodec->mOMX->getParameter(
            mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    sp<ABuffer> colorAspectsBuffer;
    if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
                colorAspectsBuffer->base(), colorAspectsBuffer->capacity());
        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}

// Creates a new input surface (IGraphicBufferProducer) for a surface-input
// encoder and replies with kWhatInputSurfaceCreated; on failure the reply
// carries an "err" value instead of the surface.
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    sp<IGraphicBufferProducer> bufferProducer;
    if (err == OK) {
        err = mCodec->mOMX->createInputSurface(
                mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, &mCodec->mInputMetadataType);
    }

    if (err == OK) {
        err = setupInputSurface();
    }

    if (err == OK) {
        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Attaches a persistent (pre-created) input surface to the encoder and
// replies with kWhatInputSurfaceAccepted; on failure the reply carries
// an "err" value.
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    if (err == OK) {
        err = mCodec->mOMX->setInputSurface(
                mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
                &mCodec->mInputMetadataType);
    }

    if (err == OK) {
        surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
        err = setupInputSurface();
    }

    if (err != OK) {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Kicks off the Loaded -> Idle OMX transition; buffer allocation happens when
// the LoadedToIdle state is entered.
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// OMX requires all port buffers to be allocated before the Idle transition
// can complete; on allocation failure we roll the component back to Loaded
// and free whatever was allocated.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
              "(error 0x%08x)",
              err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates buffers on the input port first, then the output port.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Replay once the state transition has finished.
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Waits for the Loaded->Idle completion event, then immediately requests the
// transition onwards to Executing.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Replay once the state transition has finished.
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Waits for the Idle->Executing completion event, then resumes the steady
// Executing state (which submits all buffers).
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    return RESUBMIT_BUFFERS;
}

// Metadata mode: submit one output metadata buffer for each input buffer the
// component currently holds.
void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info =
            &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Hands every output buffer we own back to the component via fillBuffer;
// buffers currently held by the native window stay there. Any unexpected
// ownership state or fillBuffer failure signals a fatal error.
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
        info->mFenceFd = -1;  // fence ownership was passed to fillBuffer
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

// (Re)starts the steady-state buffer flow: queue output buffers with the
// component and offer every free input buffer to the client. Idempotent
// while already active.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }

    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply with the result if the caller asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
6864 } 6865 return true; 6866 } 6867 6868 default: 6869 handled = BaseState::onMessageReceived(msg); 6870 break; 6871 } 6872 6873 return handled; 6874} 6875 6876status_t ACodec::setParameters(const sp<AMessage> ¶ms) { 6877 int32_t videoBitrate; 6878 if (params->findInt32("video-bitrate", &videoBitrate)) { 6879 OMX_VIDEO_CONFIG_BITRATETYPE configParams; 6880 InitOMXParams(&configParams); 6881 configParams.nPortIndex = kPortIndexOutput; 6882 configParams.nEncodeBitrate = videoBitrate; 6883 6884 status_t err = mOMX->setConfig( 6885 mNode, 6886 OMX_IndexConfigVideoBitrate, 6887 &configParams, 6888 sizeof(configParams)); 6889 6890 if (err != OK) { 6891 ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d", 6892 videoBitrate, err); 6893 6894 return err; 6895 } 6896 } 6897 6898 int64_t skipFramesBeforeUs; 6899 if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) { 6900 status_t err = 6901 mOMX->setInternalOption( 6902 mNode, 6903 kPortIndexInput, 6904 IOMX::INTERNAL_OPTION_START_TIME, 6905 &skipFramesBeforeUs, 6906 sizeof(skipFramesBeforeUs)); 6907 6908 if (err != OK) { 6909 ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err); 6910 return err; 6911 } 6912 } 6913 6914 int32_t dropInputFrames; 6915 if (params->findInt32("drop-input-frames", &dropInputFrames)) { 6916 bool suspend = dropInputFrames != 0; 6917 6918 status_t err = 6919 mOMX->setInternalOption( 6920 mNode, 6921 kPortIndexInput, 6922 IOMX::INTERNAL_OPTION_SUSPEND, 6923 &suspend, 6924 sizeof(suspend)); 6925 6926 if (err != OK) { 6927 ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err); 6928 return err; 6929 } 6930 } 6931 6932 int32_t dummy; 6933 if (params->findInt32("request-sync", &dummy)) { 6934 status_t err = requestIDRFrame(); 6935 6936 if (err != OK) { 6937 ALOGE("Requesting a sync frame failed w/ err %d", err); 6938 return err; 6939 } 6940 } 6941 6942 float rate; 6943 if (params->findFloat("operating-rate", &rate) && rate > 0) { 6944 status_t err = 
setOperatingRate(rate, mIsVideo); 6945 if (err != OK) { 6946 ALOGE("Failed to set parameter 'operating-rate' (err %d)", err); 6947 return err; 6948 } 6949 } 6950 6951 int32_t intraRefreshPeriod = 0; 6952 if (params->findInt32("intra-refresh-period", &intraRefreshPeriod) 6953 && intraRefreshPeriod > 0) { 6954 status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false); 6955 if (err != OK) { 6956 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 6957 mComponentName.c_str()); 6958 err = OK; 6959 } 6960 } 6961 6962 return OK; 6963} 6964 6965void ACodec::onSignalEndOfInputStream() { 6966 sp<AMessage> notify = mNotify->dup(); 6967 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); 6968 6969 status_t err = mOMX->signalEndOfInputStream(mNode); 6970 if (err != OK) { 6971 notify->setInt32("err", err); 6972 } 6973 notify->post(); 6974} 6975 6976bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 6977 mCodec->onFrameRendered(mediaTimeUs, systemNano); 6978 return true; 6979} 6980 6981bool ACodec::ExecutingState::onOMXEvent( 6982 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6983 switch (event) { 6984 case OMX_EventPortSettingsChanged: 6985 { 6986 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 6987 6988 mCodec->onOutputFormatChanged(); 6989 6990 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { 6991 mCodec->mMetadataBuffersToSubmit = 0; 6992 CHECK_EQ(mCodec->mOMX->sendCommand( 6993 mCodec->mNode, 6994 OMX_CommandPortDisable, kPortIndexOutput), 6995 (status_t)OK); 6996 6997 mCodec->freeOutputBuffersNotOwnedByComponent(); 6998 6999 mCodec->changeState(mCodec->mOutputPortSettingsChangedState); 7000 } else if (data2 != OMX_IndexConfigCommonOutputCrop 7001 && data2 != OMX_IndexConfigAndroidIntraRefresh) { 7002 ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x", 7003 mCodec->mComponentName.c_str(), data2); 7004 } 7005 7006 return true; 7007 } 7008 7009 case OMX_EventBufferFlag: 7010 { 
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// During reconfiguration output buffers are being freed while input keeps
// flowing normally.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            // All of these must wait until the port is reenabled.
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Drives the output port's disable -> reallocate -> enable sequence; once the
// port is reenabled we return to the Executing state.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from excecuting->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component has reached Idle AND every buffer is back with us, free
// all port buffers and continue the transition down to Loaded.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            if (err == OK) {
                err = err2;  // report the first failure of the two frees
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display.  This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

// Each returned buffer may be the last one outstanding, so re-check whether
// the Idle transition can now complete.
void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            // A flush is meaningless while we are tearing down from Idle to
            // Loaded; reject it. (Tail of
            // ACodec::IdleToLoadedState::onMessageReceived — head of the
            // function is above this chunk.)
            ALOGE("Got flush request in IdleToLoadedState");
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Log the transition into the Idle->Loaded intermediate state.
void ACodec::IdleToLoadedState::stateEntered() {
    ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
}

// Waits for the component to confirm the StateSet(Loaded) command; any other
// command completion in this state is a protocol violation and raises a codec
// error. On success we move to the terminal LoadedState.
bool ACodec::IdleToLoadedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateLoaded) {
                ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->changeState(mCodec->mLoadedState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::FlushingState::FlushingState(ACodec *codec)
    : BaseState(codec) {
}

// Entering FlushingState: neither port has completed its flush yet.
void ACodec::FlushingState::stateEntered() {
    ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());

    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
}

bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // Cannot shut down mid-flush; replay this message once the flush
            // finishes and we are back in a stable state.
            mCodec->deferMessage(msg);
            break;
        }

        case kWhatFlush:
        {
            // We're already doing this right now.
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Tracks per-port flush completion. The component may report completion per
// port (kPortIndexInput / kPortIndexOutput) or once for OMX_ALL; either way we
// only leave this state via changeStateIfWeOwnAllBuffers() once both ports
// are flushed. A port-settings change arriving mid-flush is re-packaged as an
// OMX message and deferred so the next state can process it.
bool ACodec::FlushingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    ALOGV("[%s] FlushingState onOMXEvent(%u,%d)",
            mCodec->mComponentName.c_str(), event, (OMX_S32)data1);

    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandFlush) {
                ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState",
                        asString((OMX_COMMANDTYPE)data1), data1, data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            if (data2 == kPortIndexInput || data2 == kPortIndexOutput) {
                // Guard against a buggy component reporting the same port
                // twice.
                if (mFlushComplete[data2]) {
                    ALOGW("Flush already completed for %s port",
                            data2 == kPortIndexInput ? "input" : "output");
                    return true;
                }
                mFlushComplete[data2] = true;

                if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) {
                    changeStateIfWeOwnAllBuffers();
                }
            } else if (data2 == OMX_ALL) {
                // OMX_ALL is expected only as a summary AFTER both per-port
                // completions have been seen.
                // NOTE(review): the two adjacent string literals below
                // concatenate to "have beenflushed" — missing a space.
                if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) {
                    ALOGW("received flush complete event for OMX_ALL before ports have been"
                            "flushed (%d/%d)",
                            mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]);
                    return false;
                }

                changeStateIfWeOwnAllBuffers();
            } else {
                ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2);
            }

            return true;
        }

        case OMX_EventPortSettingsChanged:
        {
            // Wrap the event back into a kWhatOMXMessage and defer it; it
            // will be handled after the flush completes and we change state.
            sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec);
            msg->setInt32("type", omx_message::EVENT);
            msg->setInt32("node", mCodec->mNode);
            msg->setInt32("event", event);
            msg->setInt32("data1", data1);
            msg->setInt32("data2", data2);

            ALOGV("[%s] Deferring OMX_EventPortSettingsChanged",
                    mCodec->mComponentName.c_str());

            mCodec->deferMessage(msg);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }

    return true;
}

// Each buffer returned to us may be the last one we were waiting on, so
// re-check whether the flush can complete.
void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

// Completes the flush once (a) both ports have reported flush-complete and
// (b) every buffer is back in our ownership. Resets EOS bookkeeping, notifies
// the client (kWhatFlushCompleted) and returns to ExecutingState.
void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
    if (mFlushComplete[kPortIndexInput]
            && mFlushComplete[kPortIndexOutput]
            && mCodec->allYourBuffersAreBelongToUs()) {
        // We now own all buffers except possibly those still queued with
        // the native window for rendering. Let's get those back as well.
        mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();

        // Drop any pending render-timing state from before the flush.
        mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));

        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatFlushCompleted);
        notify->post();

        mCodec->mPortEOS[kPortIndexInput] =
            mCodec->mPortEOS[kPortIndexOutput] = false;

        mCodec->mInputEOSResult = OK;

        if (mCodec->mSkipCutBuffer != NULL) {
            mCodec->mSkipCutBuffer->clear();
        }

        mCodec->changeState(mCodec->mExecutingState);
    }
}

// Instantiates the named OMX component and probes its capabilities (profile/
// levels, color formats, adaptive/tunneled playback, intra-refresh) into
// *caps.
//
// name      - OMX component name to allocate.
// mime      - mime type the component is queried for.
// isEncoder - selects the port to query (encoder: output, decoder: input for
//             profiles; reversed for color formats).
// caps      - out: populated Capabilities on success (cleared first).
//
// Returns BAD_VALUE for an unknown mime/role, a connect/allocate/role error
// from OMX otherwise, or OK. The node and OMX client are released on every
// path.
status_t ACodec::queryCapabilities(
        const AString &name, const AString &mime, bool isEncoder,
        sp<MediaCodecInfo::Capabilities> *caps) {
    (*caps).clear();
    const char *role = getComponentRole(isEncoder, mime.c_str());
    if (role == NULL) {
        return BAD_VALUE;
    }

    OMXClient client;
    status_t err = client.connect();
    if (err != OK) {
        return err;
    }

    sp<IOMX> omx = client.interface();
    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    err = omx->allocateNode(name.c_str(), observer, NULL, &node);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = setComponentRole(omx, node, role);
    if (err != OK) {
        omx->freeNode(node);
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        // Enumerate supported profile/level pairs until the component stops
        // responding OK, capped at kMaxIndicesToCheck.
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            OMX_U32 flexibleEquivalent;
            if (isFlexibleColorFormat(
                    omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                // Insert the flexible alias once, ahead of the first concrete
                // format that maps to it.
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        if (param.nProfileIndex == 0) {
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        // Probe tunneled / adaptive playback support by attempting to enable
        // the features on the output port.
        // NOTE(review): sidebandHandle returned by configureVideoTunnelMode is
        // never released here, and tunnel mode is not explicitly disabled —
        // presumably freeNode() below tears this down; verify.
        native_handle_t *sidebandHandle = NULL;
        if (omx->configureVideoTunnelMode(
                node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omx->storeMetaDataInBuffers(
                node, kPortIndexOutput, OMX_TRUE) == OK ||
            omx->prepareForAdaptivePlayback(
                node, kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
        if (omx->getConfig(
                node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omx->freeNode(node);
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
7634//static 7635status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7636 switch (numChannels) { 7637 case 1: 7638 map[0] = OMX_AUDIO_ChannelCF; 7639 break; 7640 case 2: 7641 map[0] = OMX_AUDIO_ChannelLF; 7642 map[1] = OMX_AUDIO_ChannelRF; 7643 break; 7644 case 3: 7645 map[0] = OMX_AUDIO_ChannelLF; 7646 map[1] = OMX_AUDIO_ChannelRF; 7647 map[2] = OMX_AUDIO_ChannelCF; 7648 break; 7649 case 4: 7650 map[0] = OMX_AUDIO_ChannelLF; 7651 map[1] = OMX_AUDIO_ChannelRF; 7652 map[2] = OMX_AUDIO_ChannelLR; 7653 map[3] = OMX_AUDIO_ChannelRR; 7654 break; 7655 case 5: 7656 map[0] = OMX_AUDIO_ChannelLF; 7657 map[1] = OMX_AUDIO_ChannelRF; 7658 map[2] = OMX_AUDIO_ChannelCF; 7659 map[3] = OMX_AUDIO_ChannelLR; 7660 map[4] = OMX_AUDIO_ChannelRR; 7661 break; 7662 case 6: 7663 map[0] = OMX_AUDIO_ChannelLF; 7664 map[1] = OMX_AUDIO_ChannelRF; 7665 map[2] = OMX_AUDIO_ChannelCF; 7666 map[3] = OMX_AUDIO_ChannelLFE; 7667 map[4] = OMX_AUDIO_ChannelLR; 7668 map[5] = OMX_AUDIO_ChannelRR; 7669 break; 7670 case 7: 7671 map[0] = OMX_AUDIO_ChannelLF; 7672 map[1] = OMX_AUDIO_ChannelRF; 7673 map[2] = OMX_AUDIO_ChannelCF; 7674 map[3] = OMX_AUDIO_ChannelLFE; 7675 map[4] = OMX_AUDIO_ChannelLR; 7676 map[5] = OMX_AUDIO_ChannelRR; 7677 map[6] = OMX_AUDIO_ChannelCS; 7678 break; 7679 case 8: 7680 map[0] = OMX_AUDIO_ChannelLF; 7681 map[1] = OMX_AUDIO_ChannelRF; 7682 map[2] = OMX_AUDIO_ChannelCF; 7683 map[3] = OMX_AUDIO_ChannelLFE; 7684 map[4] = OMX_AUDIO_ChannelLR; 7685 map[5] = OMX_AUDIO_ChannelRR; 7686 map[6] = OMX_AUDIO_ChannelLS; 7687 map[7] = OMX_AUDIO_ChannelRS; 7688 break; 7689 default: 7690 return -EINVAL; 7691 } 7692 7693 return OK; 7694} 7695 7696} // namespace android 7697