ACodec.cpp revision 28edbba6aa6c1a9bbea76da2f5cc3cd73af8eb4c
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "ACodec"

#ifdef __LP64__
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif

#include <inttypes.h>
#include <utils/Trace.h>

#include <gui/Surface.h>

#include <media/stagefright/ACodec.h>

#include <binder/MemoryDealer.h>

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>

#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/SurfaceUtils.h>
#include <media/hardware/HardwareAPI.h>

#include <OMX_AudioExt.h>
#include <OMX_VideoExt.h>
#include <OMX_Component.h>
#include <OMX_IndexExt.h>
#include <OMX_AsString.h>

#include "include/avc_utils.h"
#include "omx/OMXUtils.h"

namespace android {

enum {
    kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
};

// OMX errors are directly mapped into status_t range if
// there is no
// corresponding MediaError status code.
// Use the statusFromOMXError(int32_t omxError) function.
//
// Currently this is a direct map.
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors     from 0x90000000 - 0x9000FFFF
// Extension OMX errors  from 0x8F000000 - 0x90000000
// Standard OMX errors   from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
        default:
            return isOMXError(omxError) ? omxError : 0; // no translation required
    }
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Ref-counted holder for a batch of AMessages, so a whole list of OMX
// callbacks can be attached to a single notify message as one object.
struct MessageList : public RefBase {
    MessageList() {
    }
    virtual ~MessageList() {
    }
    std::list<sp<AMessage> > &getList() { return mList; }
private:
    std::list<sp<AMessage> > mList;

    DISALLOW_EVIL_CONSTRUCTORS(MessageList);
};

// Observer registered with the OMX node. Translates each incoming batch of
// omx_messages into AMessages, collects them in a MessageList and posts the
// whole batch on a duplicate of the notification message.
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessages(const std::list<omx_message> &messages) {
        if (messages.empty()) {
            return;
        }

        sp<AMessage> notify = mNotify->dup();
        bool first = true;
        sp<MessageList> msgList = new MessageList();
        for (std::list<omx_message>::const_iterator it = messages.cbegin();
                it != messages.cend(); ++it) {
            const omx_message &omx_msg = *it;
            if (first) {
                // the node id is set once, from the first message of the batch
                notify->setInt32("node", omx_msg.node);
                first = false;
            }

            sp<AMessage> msg = new AMessage;
            msg->setInt32("type", omx_msg.type);
            switch (omx_msg.type) {
                case omx_message::EVENT:
                {
                    msg->setInt32("event", omx_msg.u.event_data.event);
                    msg->setInt32("data1", omx_msg.u.event_data.data1);
                    msg->setInt32("data2", omx_msg.u.event_data.data2);
                    break;
                }

                case omx_message::EMPTY_BUFFER_DONE:
                {
                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                    msg->setInt32("fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FILL_BUFFER_DONE:
                {
                    msg->setInt32(
                            "buffer", omx_msg.u.extended_buffer_data.buffer);
                    msg->setInt32(
                            "range_offset",
                            omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for all of ACodec's state-machine states; provides default
// handling for OMX callbacks and buffer traffic that concrete states override.
struct ACodec::BaseState : public AState {
    BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    // How buffers returning from OMX/client are treated while in this state.
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts the stored notification when the watched binder dies.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

struct ACodec::UninitializedState : public ACodec::BaseState {
    UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::LoadedState : public ACodec::BaseState {
    LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void
    onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::ExecutingState : public ACodec::BaseState {
    ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::FlushingState : public ACodec::BaseState {
    FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    // flush-complete flags, indexed by port (input/output)
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Records fenceFd as a write fence; warns if an unwaited fence is overwritten.
void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = false;
}

// Records fenceFd as a read fence; warns if an unwaited fence is overwritten.
void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
                mIsReadFence ?
                "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = true;
}

// Logs when the stored fence is about to be used with the opposite polarity.
void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
    if (mFenceFd >= 0 && mIsReadFence) {
        ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
    }
}

void ACodec::BufferInfo::checkReadFence(const char *dbg) {
    if (mFenceFd >= 0 && !mIsReadFence) {
        ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ACodec()
    : mQuirks(0),
      mNode(0),
      mUsingNativeWindow(false),
      mNativeWindowUsageBits(0),
      mIsVideo(false),
      mIsEncoder(false),
      mFatalError(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mInputMetadataType(kMetadataBufferTypeInvalid),
      mOutputMetadataType(kMetadataBufferTypeInvalid),
      mLegacyAdaptiveExperiment(false),
      mMetadataBuffersToSubmit(0),
      mRepeatFrameDelayUs(-1ll),
      mMaxPtsGapUs(-1ll),
      mMaxFps(-1),
      mTimePerFrameUs(-1ll),
      mTimePerCaptureUs(-1ll),
      mCreateInputBuffersSuspended(false),
      mTunneled(false),
      mDescribeColorAspectsIndex((OMX_INDEXTYPE)0) {
    // instantiate all state-machine states up front
    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;
    changeState(mUninitializedState);
}

ACodec::~ACodec() {
}

void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}

// The initiate*/signal* helpers below post messages onto ACodec's looper
// thread; the active state handles them asynchronously.

void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(this);
    msg->post();
}

void ACodec::signalSetParameters(const sp<AMessage> &params) {
    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
    msg->setMessage("params", params);
    msg->post();
}

void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent);
    msg->setTarget(this);
    msg->post();
}

void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(this);
    msg->post();
}

// Synchronous: blocks until the looper thread has processed the request.
status_t ACodec::setSurface(const sp<Surface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
    msg->setObject("surface", surface);

    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err == OK) {
        (void)response->findInt32("err", &err);
    }
    return err;
}

void ACodec::initiateCreateInputSurface() {
    (new AMessage(kWhatCreateInputSurface, this))->post();
}

void ACodec::initiateSetInputSurface(
        const sp<PersistentSurface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
    msg->setObject("input-surface", surface);
    msg->post();
}

void ACodec::signalEndOfInputStream() {
    (new AMessage(kWhatSignalEndOfInputStream, this))->post();
}

void ACodec::initiateStart() {
    (new AMessage(kWhatStart, this))->post();
}

void ACodec::signalFlush() {
    ALOGV("[%s] signalFlush", mComponentName.c_str());
    (new AMessage(kWhatFlush, this))->post();
}

void ACodec::signalResume() {
    (new AMessage(kWhatResume, this))->post();
}

void ACodec::initiateShutdown(bool
        keepComponentAllocated) {
    sp<AMessage> msg = new AMessage(kWhatShutdown, this);
    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
    msg->post();
    if (!keepComponentAllocated) {
        // ensure shutdown completes in 3 seconds
        (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
    }
}

void ACodec::signalRequestIDRFrame() {
    (new AMessage(kWhatRequestIDRFrame, this))->post();
}

// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port. For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetadataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
    }
}

// Switches the output to a new surface. Validates the request (cannot unset,
// cannot switch byte-buffer/tunneled modes, usage and buffer counts must be
// compatible), then migrates already-allocated output buffers to the surface.
status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
    // allow keeping unset surface
    if (surface == NULL) {
        if (mNativeWindow != NULL) {
            ALOGW("cannot unset a surface");
            return INVALID_OPERATION;
        }
        return OK;
    }

    // cannot switch from bytebuffers to surface
    if (mNativeWindow == NULL) {
        ALOGW("component was not configured with a surface");
        return INVALID_OPERATION;
    }

    ANativeWindow *nativeWindow = surface.get();
    // if we have not yet started the codec, we can simply set the native window
    if (mBuffers[kPortIndexInput].size() == 0) {
        mNativeWindow = surface;
        return OK;
    }

    // we do not support changing a tunneled surface after start
    if (mTunneled) {
        ALOGW("cannot change tunneled surface");
        return INVALID_OPERATION;
    }

    int usageBits = 0;
    status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow, &usageBits);
    if (err !=
            OK) {
        return err;
    }

    int ignoredFlags = kVideoGrallocUsage;
    // New output surface is not allowed to add new usage flag except ignored ones.
    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
        return BAD_VALUE;
    }

    // get min undequeued count. We cannot switch to a surface that has a higher
    // undequeued count than we allocated.
    int minUndequeuedBuffers = 0;
    err = nativeWindow->query(
            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &minUndequeuedBuffers);
    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }
    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
                minUndequeuedBuffers, mNumUndequeuedBuffers);
        return BAD_VALUE;
    }

    // we cannot change the number of output buffers while OMX is running
    // set up surface to the same count
    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
    ALOGV("setting up surface for %zu buffers", buffers.size());

    err = native_window_set_buffer_count(nativeWindow, buffers.size());
    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    // need to enable allocation when attaching
    surface->getIGraphicBufferProducer()->allowAllocation(true);

    // for meta data mode, we move dequeued buffers to the new surface.
    // for non-meta mode, we must move all registered buffers
    for (size_t i = 0; i < buffers.size(); ++i) {
        const BufferInfo &info = buffers[i];
        // skip undequeued buffers for meta data mode
        if (storingMetadataInDecodedBuffers()
                && !mLegacyAdaptiveExperiment
                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("skipping buffer %p", info.mGraphicBuffer->getNativeBuffer());
            continue;
        }
        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());

        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
        if (err != OK) {
            ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
                    info.mGraphicBuffer->getNativeBuffer(),
                    strerror(-err), -err);
            return err;
        }
    }

    // cancel undequeued buffers to new surface
    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            BufferInfo &info = buffers.editItemAt(i);
            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                // cancelBuffer takes ownership of the fence fd, so clear ours
                err = nativeWindow->cancelBuffer(
                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
                info.mFenceFd = -1;
                if (err != OK) {
                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
                            info.mGraphicBuffer->getNativeBuffer(),
                            strerror(-err), -err);
                    return err;
                }
            }
        }
        // disallow further allocation
        (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // push blank buffers to previous window if requested
    if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
        pushBlankBuffersToNativeWindow(mNativeWindow.get());
    }

    mNativeWindow = nativeWindow;
    mNativeWindowUsageBits = usageBits;
    return OK;
}

// Allocates all buffers for one port, either via the native window (output
// with a surface) or through a MemoryDealer + the OMX node, then notifies
// the client of the allocated buffer set.
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex ==
            kPortIndexOutput);

    CHECK(mDealer[portIndex] == NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (storingMetadataInDecodedBuffers()) {
            err = allocateOutputMetadataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            MetadataBufferType type =
                portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
            // metadata modes use a fixed struct size rather than the port's size
            int32_t bufSize = def.nBufferSize;
            if (type == kMetadataBufferTypeGrallocSource) {
                bufSize = sizeof(VideoGrallocMetadata);
            } else if (type == kMetadataBufferTypeANWBuffer) {
                bufSize = sizeof(VideoNativeMetadata);
            }

            // If using gralloc or native source input metadata buffers, allocate largest
            // metadata size as we prefer to generate native source metadata, but component
            // may require gralloc source. For camera source, allocate at least enough
            // size for native metadata buffers.
            int32_t allottedSize = bufSize;
            if (portIndex == kPortIndexInput && type >= kMetadataBufferTypeGrallocSource) {
                bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
                bufSize = max(bufSize, (int32_t)sizeof(VideoNativeMetadata));
            }

            size_t alignment = MemoryDealer::getAllocationAlignment();

            ALOGV("[%s] Allocating %u buffers of size %d/%d (from %u using %s) on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
                    portIndex == kPortIndexInput ?
                    "input" : "output");

            // guard against zero/oversized buffers (see b/22885421)
            if (bufSize == 0 || bufSize > kMaxCodecBufferSize) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            // don't modify bufSize as OMX may not expect it to increase after negotiation
            size_t alignedSize = align(bufSize, alignment);
            // overflow check on count * alignedSize (see b/22885421)
            if (def.nBufferCountActual > SIZE_MAX / alignedSize) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            size_t totalSize = def.nBufferCountActual * alignedSize;
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
                if (mem == NULL || mem->pointer() == NULL) {
                    return NO_MEMORY;
                }

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;
                info.mFenceFd = -1;
                info.mRenderInfo = NULL;
                info.mNativeHandle = NULL;

                uint32_t requiresAllocateBufferBit =
                    (portIndex == kPortIndexInput)
                        ? kRequiresAllocateBufferOnInputPorts
                        : kRequiresAllocateBufferOnOutputPorts;

                if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
                    // secure input: the component allocates; drop our shared memory
                    mem.clear();

                    void *ptr = NULL;
                    native_handle_t *native_handle = NULL;
                    err = mOMX->allocateSecureBuffer(
                            mNode, portIndex, bufSize, &info.mBufferID,
                            &ptr, &native_handle);

                    // TRICKY: this representation is unorthodox, but ACodec requires
                    // an ABuffer with a proper size to validate range offsets and lengths.
                    // Since mData is never referenced for secure input, it is used to store
                    // either the pointer to the secure buffer, or the opaque handle as on
                    // some devices ptr is actually an opaque handle, not a pointer.

                    // TRICKY2: use native handle as the base of the ABuffer if received one,
                    // because Widevine source only receives these base addresses.
                    info.mData = new ABuffer(ptr != NULL ?
                            ptr : (void *)native_handle, bufSize);
                    info.mNativeHandle = NativeHandle::create(native_handle, true /* ownsHandle */);
                } else if (mQuirks & requiresAllocateBufferBit) {
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID, allottedSize);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
                }

                if (mem != NULL) {
                    info.mData = new ABuffer(mem->pointer(), bufSize);
                    if (type == kMetadataBufferTypeANWBuffer) {
                        // no pending fence on a freshly allocated metadata buffer
                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                    }
                    info.mMemRef = mem;
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    // tell the client which buffers were allocated on this port
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Configures the native window's geometry/format/usage from the output port
// definition; returns the combined gralloc usage bits in *finalUsage.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage);
}

// Negotiates the output buffer count between the OMX component and the native
// window and configures both sides; returns the agreed count/size and the
// number of buffers the window will keep undequeued.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(mNativeWindow.get(), &mNativeWindowUsageBits);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2.
    //    try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // Retry with fewer extra buffers if the component rejects the count.
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Dequeues graphic buffers from the native window and registers each one with
// the OMX component; the window's minimum undequeued quota is cancelled back.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
            ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
1053 if (err != 0) { 1054 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1055 break; 1056 } 1057 1058 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1059 BufferInfo info; 1060 info.mStatus = BufferInfo::OWNED_BY_US; 1061 info.mFenceFd = fenceFd; 1062 info.mIsReadFence = false; 1063 info.mRenderInfo = NULL; 1064 info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */); 1065 info.mGraphicBuffer = graphicBuffer; 1066 mBuffers[kPortIndexOutput].push(info); 1067 1068 IOMX::buffer_id bufferId; 1069 err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer, 1070 &bufferId); 1071 if (err != 0) { 1072 ALOGE("registering GraphicBuffer %u with OMX IL component failed: " 1073 "%d", i, err); 1074 break; 1075 } 1076 1077 mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId; 1078 1079 ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)", 1080 mComponentName.c_str(), 1081 bufferId, graphicBuffer.get()); 1082 } 1083 1084 OMX_U32 cancelStart; 1085 OMX_U32 cancelEnd; 1086 1087 if (err != 0) { 1088 // If an error occurred while dequeuing we need to cancel any buffers 1089 // that were dequeued. 1090 cancelStart = 0; 1091 cancelEnd = mBuffers[kPortIndexOutput].size(); 1092 } else { 1093 // Return the required minimum undequeued buffers to the native window. 
1094 cancelStart = bufferCount - minUndequeuedBuffers; 1095 cancelEnd = bufferCount; 1096 } 1097 1098 for (OMX_U32 i = cancelStart; i < cancelEnd; i++) { 1099 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1100 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1101 status_t error = cancelBufferToNativeWindow(info); 1102 if (err == 0) { 1103 err = error; 1104 } 1105 } 1106 } 1107 1108 if (!storingMetadataInDecodedBuffers()) { 1109 static_cast<Surface*>(mNativeWindow.get()) 1110 ->getIGraphicBufferProducer()->allowAllocation(false); 1111 } 1112 1113 return err; 1114} 1115 1116status_t ACodec::allocateOutputMetadataBuffers() { 1117 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1118 status_t err = configureOutputBuffersFromNativeWindow( 1119 &bufferCount, &bufferSize, &minUndequeuedBuffers); 1120 if (err != 0) 1121 return err; 1122 mNumUndequeuedBuffers = minUndequeuedBuffers; 1123 1124 ALOGV("[%s] Allocating %u meta buffers on output port", 1125 mComponentName.c_str(), bufferCount); 1126 1127 size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ? 
1128 sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata); 1129 size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment()); 1130 mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec"); 1131 1132 // Dequeue buffers and send them to OMX 1133 for (OMX_U32 i = 0; i < bufferCount; i++) { 1134 BufferInfo info; 1135 info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1136 info.mFenceFd = -1; 1137 info.mRenderInfo = NULL; 1138 info.mGraphicBuffer = NULL; 1139 info.mDequeuedAt = mDequeueCounter; 1140 1141 sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize); 1142 if (mem == NULL || mem->pointer() == NULL) { 1143 return NO_MEMORY; 1144 } 1145 if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1146 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 1147 } 1148 info.mData = new ABuffer(mem->pointer(), mem->size()); 1149 1150 // we use useBuffer for metadata regardless of quirks 1151 err = mOMX->useBuffer( 1152 mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size()); 1153 info.mMemRef = mem; 1154 mBuffers[kPortIndexOutput].push(info); 1155 1156 ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)", 1157 mComponentName.c_str(), info.mBufferID, mem->pointer()); 1158 } 1159 1160 if (mLegacyAdaptiveExperiment) { 1161 // preallocate and preregister buffers 1162 static_cast<Surface *>(mNativeWindow.get()) 1163 ->getIGraphicBufferProducer()->allowAllocation(true); 1164 1165 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1166 "output port", 1167 mComponentName.c_str(), bufferCount, bufferSize); 1168 1169 // Dequeue buffers then cancel them all 1170 for (OMX_U32 i = 0; i < bufferCount; i++) { 1171 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1172 1173 ANativeWindowBuffer *buf; 1174 int fenceFd; 1175 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1176 if (err != 0) { 1177 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), 
-err); 1178 break; 1179 } 1180 1181 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1182 mOMX->updateGraphicBufferInMeta( 1183 mNode, kPortIndexOutput, graphicBuffer, info->mBufferID); 1184 info->mStatus = BufferInfo::OWNED_BY_US; 1185 info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy"); 1186 info->mGraphicBuffer = graphicBuffer; 1187 } 1188 1189 for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) { 1190 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1191 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1192 status_t error = cancelBufferToNativeWindow(info); 1193 if (err == OK) { 1194 err = error; 1195 } 1196 } 1197 } 1198 1199 static_cast<Surface*>(mNativeWindow.get()) 1200 ->getIGraphicBufferProducer()->allowAllocation(false); 1201 } 1202 1203 mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers; 1204 return err; 1205} 1206 1207status_t ACodec::submitOutputMetadataBuffer() { 1208 CHECK(storingMetadataInDecodedBuffers()); 1209 if (mMetadataBuffersToSubmit == 0) 1210 return OK; 1211 1212 BufferInfo *info = dequeueBufferFromNativeWindow(); 1213 if (info == NULL) { 1214 return ERROR_IO; 1215 } 1216 1217 ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p", 1218 mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get()); 1219 1220 --mMetadataBuffersToSubmit; 1221 info->checkWriteFence("submitOutputMetadataBuffer"); 1222 status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd); 1223 info->mFenceFd = -1; 1224 if (err == OK) { 1225 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 1226 } 1227 1228 return err; 1229} 1230 1231status_t ACodec::waitForFence(int fd, const char *dbg ) { 1232 status_t res = OK; 1233 if (fd >= 0) { 1234 sp<Fence> fence = new Fence(fd); 1235 res = fence->wait(IOMX::kFenceTimeoutMs); 1236 ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg); 1237 } 1238 return res; 1239} 1240 1241// static 1242const char 
*ACodec::_asString(BufferInfo::Status s) {
    // Short, log-friendly name for each buffer ownership state.
    switch (s) {
        case BufferInfo::OWNED_BY_US: return "OUR";
        case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
        case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
        case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
        case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
        default: return "?";
    }
}

// Logs one line per buffer on the given port (ID, graphic buffer pointers,
// ownership state, dequeue generation) for debugging.
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                i, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
    }
}

// Returns a buffer we own back to the native window via cancelBuffer,
// transferring our write fence along with it.  Ownership is flipped to
// OWNED_BY_NATIVE_WINDOW even if the cancel fails.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    // cancelBuffer consumes the fence fd; clear ours so it is not reused.
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Records render-tracking info for a freshly dequeued buffer and immediately
// reports any frames whose fences have already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
mRenderTracker.updateInfoForDequeuedBuffer( 1291 buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]); 1292 1293 // check for any fences already signaled 1294 notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo); 1295} 1296 1297void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 1298 if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) { 1299 mRenderTracker.dumpRenderQueue(); 1300 } 1301} 1302 1303void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) { 1304 sp<AMessage> msg = mNotify->dup(); 1305 msg->setInt32("what", CodecBase::kWhatOutputFramesRendered); 1306 std::list<FrameRenderTracker::Info> done = 1307 mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete); 1308 1309 // unlink untracked frames 1310 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin(); 1311 it != done.cend(); ++it) { 1312 ssize_t index = it->getIndex(); 1313 if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) { 1314 mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL; 1315 } else if (index >= 0) { 1316 // THIS SHOULD NEVER HAPPEN 1317 ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size()); 1318 } 1319 } 1320 1321 if (MediaCodec::CreateFramesRenderedMessage(done, msg)) { 1322 msg->post(); 1323 } 1324} 1325 1326ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { 1327 ANativeWindowBuffer *buf; 1328 CHECK(mNativeWindow.get() != NULL); 1329 1330 if (mTunneled) { 1331 ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel" 1332 " video playback mode mode!"); 1333 return NULL; 1334 } 1335 1336 if (mFatalError) { 1337 ALOGW("not dequeuing from native window due to fatal error"); 1338 return NULL; 1339 } 1340 1341 int fenceFd = -1; 1342 do { 1343 status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1344 if (err != 0) { 1345 ALOGE("dequeueBuffer failed: %s(%d).", asString(err), 
err);
            return NULL;
        }

        // Scan our registered output buffers (newest first) for the dequeued
        // native handle.
        bool stale = false;
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used.  We can simply ignore this as we have already dequeued
                // this buffer properly. NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                // Known buffer in the expected state: take ownership and
                // adopt the dequeue fence as its write fence.
                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // NOTE(review): the fence fd obtained for this dequeue does not
            // appear to be closed before retrying - verify for fd leaks.
            // TODO: detach stale buffer, but there is no API yet to do it.
buf = NULL;
        }
    } while (buf == NULL);

    // Reaching here means we dequeued an unregistered buffer in meta mode:
    // recycle the least-recently-dequeued slot for it.
    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible to dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    // Point the component's metadata record at the replacement buffer.
    mOMX->updateGraphicBufferInMeta(
            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
            oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer on the given port, continuing past individual failures
// and returning the first error encountered.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    // Iterate backwards: freeBuffer() removes entries from the vector.
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}

// Frees all output buffers except those currently held by the component or
// by a downstream consumer; returns the first error encountered.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t err = OK;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            status_t err2 = freeBuffer(kPortIndexOutput, i);
            if (err == OK) {
                err = err2;
            }
        }
    }

    return err;
}

// Frees buffer |i| on |portIndex|: cancels it back to the window if we own
// it, releases it from the OMX component, closes any pending fence, and
// always removes it from mBuffers.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ?
"input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            // Buffers we own on the output port must go back to the window
            // before the component releases them.
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMX->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}

// Linear search for the BufferInfo with the given OMX buffer ID on a port;
// optionally reports its index.  Returns NULL (with an error log) if absent.
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    ALOGE("Could not find buffer with ID %u", bufferID);
    return NULL;
}

// Maps the mime type to a standard OMX component role and applies it to the
// component.  Returns BAD_VALUE for unknown mime types; a set failure is
// logged and propagated.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    const char *role = getComponentRole(isEncoder, mime);
    if (role == NULL) {
        return BAD_VALUE;
    }
    status_t err = setComponentRole(mOMX, mNode, role);
    if (err != OK) {
        ALOGW("[%s] Failed to set standard component role '%s'.",
                mComponentName.c_str(), role);
    }
    return err;
}

//static
// Returns the standard OMX role string ("audio_decoder.mp3", etc.) for a
// mime type, or NULL if the mime type is not in the table.
const char *ACodec::getComponentRole(
        bool isEncoder, const char *mime) {
    struct MimeToRole {
        const char *mime;
        const char *decoderRole;
        const char *encoderRole;
    };

    // mime -> { decoder role, encoder role } lookup table.
    static const MimeToRole kMimeToRole[] = {
        {
MEDIA_MIMETYPE_AUDIO_MPEG,
            "audio_decoder.mp3", "audio_encoder.mp3" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
            "audio_decoder.mp1", "audio_encoder.mp1" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
            "audio_decoder.mp2", "audio_encoder.mp2" },
        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
            "audio_decoder.amrnb", "audio_encoder.amrnb" },
        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
            "audio_decoder.amrwb", "audio_encoder.amrwb" },
        { MEDIA_MIMETYPE_AUDIO_AAC,
            "audio_decoder.aac", "audio_encoder.aac" },
        { MEDIA_MIMETYPE_AUDIO_VORBIS,
            "audio_decoder.vorbis", "audio_encoder.vorbis" },
        { MEDIA_MIMETYPE_AUDIO_OPUS,
            "audio_decoder.opus", "audio_encoder.opus" },
        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
        { MEDIA_MIMETYPE_VIDEO_AVC,
            "video_decoder.avc", "video_encoder.avc" },
        { MEDIA_MIMETYPE_VIDEO_HEVC,
            "video_decoder.hevc", "video_encoder.hevc" },
        { MEDIA_MIMETYPE_VIDEO_MPEG4,
            "video_decoder.mpeg4", "video_encoder.mpeg4" },
        { MEDIA_MIMETYPE_VIDEO_H263,
            "video_decoder.h263", "video_encoder.h263" },
        { MEDIA_MIMETYPE_VIDEO_VP8,
            "video_decoder.vp8", "video_encoder.vp8" },
        { MEDIA_MIMETYPE_VIDEO_VP9,
            "video_decoder.vp9", "video_encoder.vp9" },
        { MEDIA_MIMETYPE_AUDIO_RAW,
            "audio_decoder.raw", "audio_encoder.raw" },
        { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
            "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
        { MEDIA_MIMETYPE_AUDIO_FLAC,
            "audio_decoder.flac", "audio_encoder.flac" },
        { MEDIA_MIMETYPE_AUDIO_MSGSM,
            "audio_decoder.gsm", "audio_encoder.gsm" },
        { MEDIA_MIMETYPE_VIDEO_MPEG2,
            "video_decoder.mpeg2", "video_encoder.mpeg2" },
        { MEDIA_MIMETYPE_AUDIO_AC3,
            "audio_decoder.ac3", "audio_encoder.ac3" },
        { MEDIA_MIMETYPE_AUDIO_EAC3,
            "audio_decoder.eac3",
"audio_encoder.eac3" },
    };

    static const size_t kNumMimeToRole =
        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);

    // Case-insensitive linear search through the table.
    size_t i;
    for (i = 0; i < kNumMimeToRole; ++i) {
        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
            break;
        }
    }

    if (i == kNumMimeToRole) {
        return NULL;
    }

    return isEncoder ? kMimeToRole[i].encoderRole
                     : kMimeToRole[i].decoderRole;
}

//static
// Applies the given role string to the component via
// OMX_IndexParamStandardComponentRole, guaranteeing NUL termination of the
// fixed-size role field.
status_t ACodec::setComponentRole(
        const sp<IOMX> &omx, IOMX::node_id node, const char *role) {
    OMX_PARAM_COMPONENTROLETYPE roleParams;
    InitOMXParams(&roleParams);

    strncpy((char *)roleParams.cRole,
            role, OMX_MAX_STRINGNAME_SIZE - 1);

    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';

    return omx->setParameter(
            node, OMX_IndexParamStandardComponentRole,
            &roleParams, sizeof(roleParams));
}

// Configures the component from the application-supplied format message
// (role, bitrate, metadata modes, encoder parameters, native window, ...).
// NOTE: this function continues beyond this excerpt.
status_t ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    int32_t encoder;
    if (!msg->findInt32("encoder", &encoder)) {
        encoder = false;
    }

    sp<AMessage> inputFormat = new AMessage;
    sp<AMessage> outputFormat = new AMessage;
    mConfigFormat = msg;

    mIsEncoder = encoder;

    mInputMetadataType = kMetadataBufferTypeInvalid;
    mOutputMetadataType = kMetadataBufferTypeInvalid;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);

    if (err != OK) {
        return err;
    }

    int32_t bitRate = 0;
    // FLAC encoder doesn't need a bitrate, other encoders do
    if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
            && !msg->findInt32("bitrate", &bitRate)) {
        return INVALID_OPERATION;
    }

    int32_t storeMeta;
    if (encoder
            && msg->findInt32("store-metadata-in-buffers", &storeMeta)
            && storeMeta != 0) {
        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);
        if (err != OK) {
ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1688 mComponentName.c_str(), err); 1689 1690 return err; 1691 } 1692 // For this specific case we could be using camera source even if storeMetaDataInBuffers 1693 // returns Gralloc source. Pretend that we are; this will force us to use nBufferSize. 1694 if (mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1695 mInputMetadataType = kMetadataBufferTypeCameraSource; 1696 } 1697 1698 uint32_t usageBits; 1699 if (mOMX->getParameter( 1700 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1701 &usageBits, sizeof(usageBits)) == OK) { 1702 inputFormat->setInt32( 1703 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1704 } 1705 } 1706 1707 int32_t prependSPSPPS = 0; 1708 if (encoder 1709 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1710 && prependSPSPPS != 0) { 1711 OMX_INDEXTYPE index; 1712 err = mOMX->getExtensionIndex( 1713 mNode, 1714 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1715 &index); 1716 1717 if (err == OK) { 1718 PrependSPSPPSToIDRFramesParams params; 1719 InitOMXParams(¶ms); 1720 params.bEnable = OMX_TRUE; 1721 1722 err = mOMX->setParameter( 1723 mNode, index, ¶ms, sizeof(params)); 1724 } 1725 1726 if (err != OK) { 1727 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1728 "IDR frames. (err %d)", err); 1729 1730 return err; 1731 } 1732 } 1733 1734 // Only enable metadata mode on encoder output if encoder can prepend 1735 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1736 // opaque handle, to which we don't have access. 
1737 int32_t video = !strncasecmp(mime, "video/", 6); 1738 mIsVideo = video; 1739 if (encoder && video) { 1740 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1741 && msg->findInt32("store-metadata-in-buffers-output", &storeMeta) 1742 && storeMeta != 0); 1743 1744 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1745 if (err != OK) { 1746 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1747 mComponentName.c_str(), err); 1748 } 1749 1750 if (!msg->findInt64( 1751 "repeat-previous-frame-after", 1752 &mRepeatFrameDelayUs)) { 1753 mRepeatFrameDelayUs = -1ll; 1754 } 1755 1756 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1757 mMaxPtsGapUs = -1ll; 1758 } 1759 1760 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1761 mMaxFps = -1; 1762 } 1763 1764 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1765 mTimePerCaptureUs = -1ll; 1766 } 1767 1768 if (!msg->findInt32( 1769 "create-input-buffers-suspended", 1770 (int32_t*)&mCreateInputBuffersSuspended)) { 1771 mCreateInputBuffersSuspended = false; 1772 } 1773 } 1774 1775 // NOTE: we only use native window for video decoders 1776 sp<RefBase> obj; 1777 bool haveNativeWindow = msg->findObject("native-window", &obj) 1778 && obj != NULL && video && !encoder; 1779 mUsingNativeWindow = haveNativeWindow; 1780 mLegacyAdaptiveExperiment = false; 1781 if (video && !encoder) { 1782 inputFormat->setInt32("adaptive-playback", false); 1783 1784 int32_t usageProtected; 1785 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1786 if (!haveNativeWindow) { 1787 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1788 return PERMISSION_DENIED; 1789 } 1790 mFlags |= kFlagIsGrallocUsageProtected; 1791 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1792 } 1793 1794 if (mFlags & kFlagIsSecure) { 1795 // use native_handles for secure input buffers 1796 err = mOMX->enableNativeBuffers( 1797 mNode, kPortIndexInput, OMX_FALSE 
/* graphic */, OMX_TRUE); 1798 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1799 err = OK; // ignore error for now 1800 } 1801 } 1802 if (haveNativeWindow) { 1803 sp<ANativeWindow> nativeWindow = 1804 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1805 1806 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1807 int32_t autoFrc; 1808 if (msg->findInt32("auto-frc", &autoFrc)) { 1809 bool enabled = autoFrc; 1810 OMX_CONFIG_BOOLEANTYPE config; 1811 InitOMXParams(&config); 1812 config.bEnabled = (OMX_BOOL)enabled; 1813 status_t temp = mOMX->setConfig( 1814 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1815 &config, sizeof(config)); 1816 if (temp == OK) { 1817 outputFormat->setInt32("auto-frc", enabled); 1818 } else if (enabled) { 1819 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1820 } 1821 } 1822 // END of temporary support for automatic FRC 1823 1824 int32_t tunneled; 1825 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1826 tunneled != 0) { 1827 ALOGI("Configuring TUNNELED video playback."); 1828 mTunneled = true; 1829 1830 int32_t audioHwSync = 0; 1831 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1832 ALOGW("No Audio HW Sync provided for video tunnel"); 1833 } 1834 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1835 if (err != OK) { 1836 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1837 audioHwSync, nativeWindow.get()); 1838 return err; 1839 } 1840 1841 int32_t maxWidth = 0, maxHeight = 0; 1842 if (msg->findInt32("max-width", &maxWidth) && 1843 msg->findInt32("max-height", &maxHeight)) { 1844 1845 err = mOMX->prepareForAdaptivePlayback( 1846 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1847 if (err != OK) { 1848 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1849 mComponentName.c_str(), err); 1850 // allow failure 1851 err = OK; 1852 } else { 1853 inputFormat->setInt32("max-width", maxWidth); 1854 
inputFormat->setInt32("max-height", maxHeight); 1855 inputFormat->setInt32("adaptive-playback", true); 1856 } 1857 } 1858 } else { 1859 ALOGV("Configuring CPU controlled video playback."); 1860 mTunneled = false; 1861 1862 // Explicity reset the sideband handle of the window for 1863 // non-tunneled video in case the window was previously used 1864 // for a tunneled video playback. 1865 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1866 if (err != OK) { 1867 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1868 return err; 1869 } 1870 1871 // Always try to enable dynamic output buffers on native surface 1872 err = mOMX->storeMetaDataInBuffers( 1873 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1874 if (err != OK) { 1875 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1876 mComponentName.c_str(), err); 1877 1878 // if adaptive playback has been requested, try JB fallback 1879 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1880 // LARGE MEMORY REQUIREMENT 1881 1882 // we will not do adaptive playback on software accessed 1883 // surfaces as they never had to respond to changes in the 1884 // crop window, and we don't trust that they will be able to. 
1885 int usageBits = 0; 1886 bool canDoAdaptivePlayback; 1887 1888 if (nativeWindow->query( 1889 nativeWindow.get(), 1890 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1891 &usageBits) != OK) { 1892 canDoAdaptivePlayback = false; 1893 } else { 1894 canDoAdaptivePlayback = 1895 (usageBits & 1896 (GRALLOC_USAGE_SW_READ_MASK | 1897 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1898 } 1899 1900 int32_t maxWidth = 0, maxHeight = 0; 1901 if (canDoAdaptivePlayback && 1902 msg->findInt32("max-width", &maxWidth) && 1903 msg->findInt32("max-height", &maxHeight)) { 1904 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1905 mComponentName.c_str(), maxWidth, maxHeight); 1906 1907 err = mOMX->prepareForAdaptivePlayback( 1908 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1909 maxHeight); 1910 ALOGW_IF(err != OK, 1911 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1912 mComponentName.c_str(), err); 1913 1914 if (err == OK) { 1915 inputFormat->setInt32("max-width", maxWidth); 1916 inputFormat->setInt32("max-height", maxHeight); 1917 inputFormat->setInt32("adaptive-playback", true); 1918 } 1919 } 1920 // allow failure 1921 err = OK; 1922 } else { 1923 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1924 mComponentName.c_str()); 1925 CHECK(storingMetadataInDecodedBuffers()); 1926 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1927 "legacy-adaptive", !msg->contains("no-experiments")); 1928 1929 inputFormat->setInt32("adaptive-playback", true); 1930 } 1931 1932 int32_t push; 1933 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1934 && push != 0) { 1935 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1936 } 1937 } 1938 1939 int32_t rotationDegrees; 1940 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1941 mRotationDegrees = rotationDegrees; 1942 } else { 1943 mRotationDegrees = 0; 1944 } 1945 } 1946 1947 if (video) { 1948 // determine need for software renderer 1949 bool usingSwRenderer = false; 1950 if (haveNativeWindow && 
mComponentName.startsWith("OMX.google.")) { 1951 usingSwRenderer = true; 1952 haveNativeWindow = false; 1953 } 1954 1955 if (encoder) { 1956 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 1957 } else { 1958 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 1959 } 1960 1961 if (err != OK) { 1962 return err; 1963 } 1964 1965 if (haveNativeWindow) { 1966 mNativeWindow = static_cast<Surface *>(obj.get()); 1967 } 1968 1969 // initialize native window now to get actual output format 1970 // TODO: this is needed for some encoders even though they don't use native window 1971 err = initNativeWindow(); 1972 if (err != OK) { 1973 return err; 1974 } 1975 1976 // fallback for devices that do not handle flex-YUV for native buffers 1977 if (haveNativeWindow) { 1978 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 1979 if (msg->findInt32("color-format", &requestedColorFormat) && 1980 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 1981 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 1982 if (err != OK) { 1983 return err; 1984 } 1985 int32_t colorFormat = OMX_COLOR_FormatUnused; 1986 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 1987 if (!outputFormat->findInt32("color-format", &colorFormat)) { 1988 ALOGE("ouptut port did not have a color format (wrong domain?)"); 1989 return BAD_VALUE; 1990 } 1991 ALOGD("[%s] Requested output format %#x and got %#x.", 1992 mComponentName.c_str(), requestedColorFormat, colorFormat); 1993 if (!isFlexibleColorFormat( 1994 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 1995 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 1996 // device did not handle flex-YUV request for native window, fall back 1997 // to SW renderer 1998 ALOGI("[%s] Falling back to software renderer", mComponentName.c_str()); 1999 mNativeWindow.clear(); 2000 mNativeWindowUsageBits = 0; 2001 haveNativeWindow = false; 2002 usingSwRenderer = true; 2003 if 
(storingMetadataInDecodedBuffers()) { 2004 err = mOMX->storeMetaDataInBuffers( 2005 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2006 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2007 // TODO: implement adaptive-playback support for bytebuffer mode. 2008 // This is done by SW codecs, but most HW codecs don't support it. 2009 inputFormat->setInt32("adaptive-playback", false); 2010 } 2011 if (err == OK) { 2012 err = mOMX->enableNativeBuffers( 2013 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2014 } 2015 if (mFlags & kFlagIsGrallocUsageProtected) { 2016 // fallback is not supported for protected playback 2017 err = PERMISSION_DENIED; 2018 } else if (err == OK) { 2019 err = setupVideoDecoder( 2020 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2021 } 2022 } 2023 } 2024 } 2025 2026 if (usingSwRenderer) { 2027 outputFormat->setInt32("using-sw-renderer", 1); 2028 } 2029 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2030 int32_t numChannels, sampleRate; 2031 if (!msg->findInt32("channel-count", &numChannels) 2032 || !msg->findInt32("sample-rate", &sampleRate)) { 2033 // Since we did not always check for these, leave them optional 2034 // and have the decoder figure it all out. 2035 err = OK; 2036 } else { 2037 err = setupRawAudioFormat( 2038 encoder ? 
kPortIndexInput : kPortIndexOutput, 2039 sampleRate, 2040 numChannels); 2041 } 2042 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2043 int32_t numChannels, sampleRate; 2044 if (!msg->findInt32("channel-count", &numChannels) 2045 || !msg->findInt32("sample-rate", &sampleRate)) { 2046 err = INVALID_OPERATION; 2047 } else { 2048 int32_t isADTS, aacProfile; 2049 int32_t sbrMode; 2050 int32_t maxOutputChannelCount; 2051 int32_t pcmLimiterEnable; 2052 drcParams_t drc; 2053 if (!msg->findInt32("is-adts", &isADTS)) { 2054 isADTS = 0; 2055 } 2056 if (!msg->findInt32("aac-profile", &aacProfile)) { 2057 aacProfile = OMX_AUDIO_AACObjectNull; 2058 } 2059 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2060 sbrMode = -1; 2061 } 2062 2063 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2064 maxOutputChannelCount = -1; 2065 } 2066 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2067 // value is unknown 2068 pcmLimiterEnable = -1; 2069 } 2070 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2071 // value is unknown 2072 drc.encodedTargetLevel = -1; 2073 } 2074 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2075 // value is unknown 2076 drc.drcCut = -1; 2077 } 2078 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2079 // value is unknown 2080 drc.drcBoost = -1; 2081 } 2082 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2083 // value is unknown 2084 drc.heavyCompression = -1; 2085 } 2086 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2087 // value is unknown 2088 drc.targetRefLevel = -1; 2089 } 2090 2091 err = setupAACCodec( 2092 encoder, numChannels, sampleRate, bitRate, aacProfile, 2093 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2094 pcmLimiterEnable); 2095 } 2096 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2097 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2098 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2099 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2100 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2101 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2102 // These are PCM-like formats with a fixed sample rate but 2103 // a variable number of channels. 2104 2105 int32_t numChannels; 2106 if (!msg->findInt32("channel-count", &numChannels)) { 2107 err = INVALID_OPERATION; 2108 } else { 2109 int32_t sampleRate; 2110 if (!msg->findInt32("sample-rate", &sampleRate)) { 2111 sampleRate = 8000; 2112 } 2113 err = setupG711Codec(encoder, sampleRate, numChannels); 2114 } 2115 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2116 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2117 if (encoder && 2118 (!msg->findInt32("channel-count", &numChannels) 2119 || !msg->findInt32("sample-rate", &sampleRate))) { 2120 ALOGE("missing channel count or sample rate for FLAC encoder"); 2121 err = INVALID_OPERATION; 2122 } else { 2123 if (encoder) { 2124 if (!msg->findInt32( 2125 "complexity", &compressionLevel) && 2126 !msg->findInt32( 2127 "flac-compression-level", &compressionLevel)) { 2128 compressionLevel = 5; // default FLAC compression level 2129 } else if (compressionLevel < 0) { 2130 ALOGW("compression level %d outside [0..8] range, " 2131 "using 0", 2132 compressionLevel); 2133 compressionLevel = 0; 2134 } else if (compressionLevel > 8) { 2135 ALOGW("compression level %d outside [0..8] range, " 2136 "using 8", 2137 compressionLevel); 2138 compressionLevel = 8; 2139 } 2140 } 2141 err = setupFlacCodec( 2142 encoder, numChannels, sampleRate, compressionLevel); 2143 } 2144 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2145 int32_t numChannels, sampleRate; 2146 if (encoder 2147 || !msg->findInt32("channel-count", &numChannels) 2148 || !msg->findInt32("sample-rate", &sampleRate)) { 2149 err = INVALID_OPERATION; 2150 } else { 2151 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels); 2152 } 2153 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2154 int32_t numChannels; 2155 int32_t sampleRate; 2156 if (!msg->findInt32("channel-count", &numChannels) 2157 || !msg->findInt32("sample-rate", &sampleRate)) { 2158 err = INVALID_OPERATION; 2159 } else { 2160 err = setupAC3Codec(encoder, numChannels, sampleRate); 2161 } 2162 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2163 int32_t numChannels; 2164 int32_t sampleRate; 2165 if (!msg->findInt32("channel-count", &numChannels) 2166 || !msg->findInt32("sample-rate", &sampleRate)) { 2167 err = INVALID_OPERATION; 2168 } else { 2169 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2170 } 2171 } 2172 2173 if (err != OK) { 2174 return err; 2175 } 2176 2177 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2178 mEncoderDelay = 0; 2179 } 2180 2181 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2182 mEncoderPadding = 0; 2183 } 2184 2185 if (msg->findInt32("channel-mask", &mChannelMask)) { 2186 mChannelMaskPresent = true; 2187 } else { 2188 mChannelMaskPresent = false; 2189 } 2190 2191 int32_t maxInputSize; 2192 if (msg->findInt32("max-input-size", &maxInputSize)) { 2193 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2194 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2195 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2196 } 2197 2198 int32_t priority; 2199 if (msg->findInt32("priority", &priority)) { 2200 err = setPriority(priority); 2201 } 2202 2203 int32_t rateInt = -1; 2204 float rateFloat = -1; 2205 if (!msg->findFloat("operating-rate", &rateFloat)) { 2206 msg->findInt32("operating-rate", &rateInt); 2207 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
2208 } 2209 if (rateFloat > 0) { 2210 err = setOperatingRate(rateFloat, video); 2211 } 2212 2213 mBaseOutputFormat = outputFormat; 2214 // trigger a kWhatOutputFormatChanged msg on first buffer 2215 mLastOutputFormat.clear(); 2216 2217 err = getPortFormat(kPortIndexInput, inputFormat); 2218 if (err == OK) { 2219 err = getPortFormat(kPortIndexOutput, outputFormat); 2220 if (err == OK) { 2221 mInputFormat = inputFormat; 2222 mOutputFormat = outputFormat; 2223 } 2224 } 2225 return err; 2226} 2227 2228status_t ACodec::setPriority(int32_t priority) { 2229 if (priority < 0) { 2230 return BAD_VALUE; 2231 } 2232 OMX_PARAM_U32TYPE config; 2233 InitOMXParams(&config); 2234 config.nU32 = (OMX_U32)priority; 2235 status_t temp = mOMX->setConfig( 2236 mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2237 &config, sizeof(config)); 2238 if (temp != OK) { 2239 ALOGI("codec does not support config priority (err %d)", temp); 2240 } 2241 return OK; 2242} 2243 2244status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2245 if (rateFloat < 0) { 2246 return BAD_VALUE; 2247 } 2248 OMX_U32 rate; 2249 if (isVideo) { 2250 if (rateFloat > 65535) { 2251 return BAD_VALUE; 2252 } 2253 rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f); 2254 } else { 2255 if (rateFloat > UINT_MAX) { 2256 return BAD_VALUE; 2257 } 2258 rate = (OMX_U32)(rateFloat); 2259 } 2260 OMX_PARAM_U32TYPE config; 2261 InitOMXParams(&config); 2262 config.nU32 = rate; 2263 status_t err = mOMX->setConfig( 2264 mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, 2265 &config, sizeof(config)); 2266 if (err != OK) { 2267 ALOGI("codec does not support config operating rate (err %d)", err); 2268 } 2269 return OK; 2270} 2271 2272status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) { 2273 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2274 InitOMXParams(¶ms); 2275 params.nPortIndex = kPortIndexOutput; 2276 status_t err = mOMX->getConfig( 2277 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, 
// Queries the encoder's intra-refresh period (in frames). Tries the
// Android-specific config first; if the component does not support it,
// falls back to the standard OMX cyclic intra-refresh parameter and
// derives the period from the macroblock count.
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        // Not supported or intra refresh disabled: report period 0.
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);

    return OK;
}

// Sets the encoder's intra-refresh period (in frames). Tries the
// Android-specific config; on failure, and only while still in
// configure(), falls back to the standard cyclic intra-refresh
// parameter, converting the period to a per-frame macroblock count.
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
    if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}

// Ensures the port's buffer size is at least |size| bytes: reads the
// port definition, raises nBufferSize if needed, then reads it back to
// verify the component actually accepted the larger value.
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // Already large enough -- nothing to do.
    if (def.nBufferSize >= size) {
        return OK;
    }

    def.nBufferSize = size;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // Read back: a component may silently ignore or clamp the request.
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize < size) {
        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
        return FAILED_TRANSACTION;
    }

    return OK;
}
// Enumerates the audio formats supported on |portIndex| until
// |desiredFormat| is found, then selects it via setParameter.
// Gives up with ERROR_UNSUPPORTED after kMaxIndicesToCheck entries.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            // |format| now holds the matching entry; fall through to select it.
            break;
        }

        if (index == kMaxIndicesToCheck) {
            // Cap the enumeration; some components expose very long lists.
            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eEncoding), format.eEncoding);
            return ERROR_UNSUPPORTED;
        }
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}
// Configures AAC encode or decode. Both directions first set up raw PCM
// on the uncompressed port; the encoder path then programs the AAC
// profile/bitrate/SBR tools on the output port, while the decoder path
// programs stream format (ADTS vs MP4FF) plus optional DRC/presentation
// parameters on the input port.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    // ADTS framing is only meaningful for decode input.
    if (encoder && isADTS) {
        return -EINVAL;
    }

    // Uncompressed side: input port for an encoder, output for a decoder.
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // sbrMode comes from the "aac-sbr-mode" format key; -1 means
        // the caller did not specify one.
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // Decoder path: program the AAC profile on the (compressed) input port.
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // DRC / presentation values of -1 mean "unset" (see configureCodec).
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    return res;
}
// Configures AC-3 decode: raw PCM on the output port, AC-3 channel
// count / sample rate on the input port. Encoding is rejected.
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("AC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));
}
// Configures E-AC-3 decode: raw PCM on the output port, E-AC-3 channel
// count / sample rate on the input port. Encoding is rejected.
// (Mirrors setupAC3Codec with the Eac3 index.)
status_t ACodec::setupEAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("EAC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));
}
2675 } else if (bps <= 15850) { 2676 return OMX_AUDIO_AMRBandModeWB4; 2677 } else if (bps <= 18250) { 2678 return OMX_AUDIO_AMRBandModeWB5; 2679 } else if (bps <= 19850) { 2680 return OMX_AUDIO_AMRBandModeWB6; 2681 } else if (bps <= 23050) { 2682 return OMX_AUDIO_AMRBandModeWB7; 2683 } 2684 2685 // 23850 bps 2686 return OMX_AUDIO_AMRBandModeWB8; 2687 } else { // AMRNB 2688 if (bps <= 4750) { 2689 return OMX_AUDIO_AMRBandModeNB0; 2690 } else if (bps <= 5150) { 2691 return OMX_AUDIO_AMRBandModeNB1; 2692 } else if (bps <= 5900) { 2693 return OMX_AUDIO_AMRBandModeNB2; 2694 } else if (bps <= 6700) { 2695 return OMX_AUDIO_AMRBandModeNB3; 2696 } else if (bps <= 7400) { 2697 return OMX_AUDIO_AMRBandModeNB4; 2698 } else if (bps <= 7950) { 2699 return OMX_AUDIO_AMRBandModeNB5; 2700 } else if (bps <= 10200) { 2701 return OMX_AUDIO_AMRBandModeNB6; 2702 } 2703 2704 // 12200 bps 2705 return OMX_AUDIO_AMRBandModeNB7; 2706 } 2707} 2708 2709status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2710 OMX_AUDIO_PARAM_AMRTYPE def; 2711 InitOMXParams(&def); 2712 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2713 2714 status_t err = 2715 mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2716 2717 if (err != OK) { 2718 return err; 2719 } 2720 2721 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2722 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2723 2724 err = mOMX->setParameter( 2725 mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2726 2727 if (err != OK) { 2728 return err; 2729 } 2730 2731 return setupRawAudioFormat( 2732 encoder ? kPortIndexInput : kPortIndexOutput, 2733 isWAMR ? 
16000 : 8000 /* sampleRate */, 2734 1 /* numChannels */); 2735} 2736 2737status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2738 if (encoder) { 2739 return INVALID_OPERATION; 2740 } 2741 2742 return setupRawAudioFormat( 2743 kPortIndexInput, sampleRate, numChannels); 2744} 2745 2746status_t ACodec::setupFlacCodec( 2747 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2748 2749 if (encoder) { 2750 OMX_AUDIO_PARAM_FLACTYPE def; 2751 InitOMXParams(&def); 2752 def.nPortIndex = kPortIndexOutput; 2753 2754 // configure compression level 2755 status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2756 if (err != OK) { 2757 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2758 return err; 2759 } 2760 def.nCompressionLevel = compressionLevel; 2761 err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2762 if (err != OK) { 2763 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2764 return err; 2765 } 2766 } 2767 2768 return setupRawAudioFormat( 2769 encoder ? 
kPortIndexInput : kPortIndexOutput, 2770 sampleRate, 2771 numChannels); 2772} 2773 2774status_t ACodec::setupRawAudioFormat( 2775 OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels) { 2776 OMX_PARAM_PORTDEFINITIONTYPE def; 2777 InitOMXParams(&def); 2778 def.nPortIndex = portIndex; 2779 2780 status_t err = mOMX->getParameter( 2781 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2782 2783 if (err != OK) { 2784 return err; 2785 } 2786 2787 def.format.audio.eEncoding = OMX_AUDIO_CodingPCM; 2788 2789 err = mOMX->setParameter( 2790 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 2791 2792 if (err != OK) { 2793 return err; 2794 } 2795 2796 OMX_AUDIO_PARAM_PCMMODETYPE pcmParams; 2797 InitOMXParams(&pcmParams); 2798 pcmParams.nPortIndex = portIndex; 2799 2800 err = mOMX->getParameter( 2801 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2802 2803 if (err != OK) { 2804 return err; 2805 } 2806 2807 pcmParams.nChannels = numChannels; 2808 pcmParams.eNumData = OMX_NumericalDataSigned; 2809 pcmParams.bInterleaved = OMX_TRUE; 2810 pcmParams.nBitPerSample = 16; 2811 pcmParams.nSamplingRate = sampleRate; 2812 pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear; 2813 2814 if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) { 2815 return OMX_ErrorNone; 2816 } 2817 2818 return mOMX->setParameter( 2819 mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2820} 2821 2822status_t ACodec::configureTunneledVideoPlayback( 2823 int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) { 2824 native_handle_t* sidebandHandle; 2825 2826 status_t err = mOMX->configureVideoTunnelMode( 2827 mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle); 2828 if (err != OK) { 2829 ALOGE("configureVideoTunnelMode failed! (err %d).", err); 2830 return err; 2831 } 2832 2833 err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle); 2834 if (err != OK) { 2835 ALOGE("native_window_set_sideband_stream(%p) failed! 
// Puts the output port into tunneled video playback mode, synchronized
// with |audioHwSync|, and attaches the resulting sideband handle to the
// native window so frames bypass the normal buffer path.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    // Hand the sideband stream to the window; on failure the tunnel is
    // unusable, so propagate the error to the caller.
    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}
// Enumerates the video formats on |portIndex| looking for the requested
// compression format / color format pair, then selects the matching
// entry. Flexible color formats are substituted by the component's
// concrete equivalent, and a vendor-specific quirk for the TI encoder
// relaxes the match to one field only. Gives up after
// kMaxIndicesToCheck entries.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
                && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            // Cap the enumeration; note this only warns -- the loop
            // then exits with found == false and UNKNOWN_ERROR below.
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}
// Selects the output color format per the preference order documented
// above: normally the component's first (most preferred) format; with
// |getLegacyFlexibleFormat|, keeps scanning for a standard YUV420
// format, falling back to a SW-readable flex-YUV equivalent found along
// the way.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        // Output of a decoder must be uncompressed.
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            return OMX_ErrorBadParameter;
        }
        if (!getLegacyFlexibleFormat) {
            // Take the first (most preferred) format as-is.
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}
mVideoCodingType; 2986} kVideoCodingMapEntry[] = { 2987 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 2988 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 2989 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 2990 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 2991 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 2992 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 2993 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 2994 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 2995}; 2996 2997static status_t GetVideoCodingTypeFromMime( 2998 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 2999 for (size_t i = 0; 3000 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3001 ++i) { 3002 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3003 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3004 return OK; 3005 } 3006 } 3007 3008 *codingType = OMX_VIDEO_CodingUnused; 3009 3010 return ERROR_UNSUPPORTED; 3011} 3012 3013static status_t GetMimeTypeForVideoCoding( 3014 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3015 for (size_t i = 0; 3016 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3017 ++i) { 3018 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3019 *mime = kVideoCodingMapEntry[i].mMime; 3020 return OK; 3021 } 3022 } 3023 3024 mime->clear(); 3025 3026 return ERROR_UNSUPPORTED; 3027} 3028 3029status_t ACodec::setupVideoDecoder( 3030 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow, 3031 bool usingSwRenderer, sp<AMessage> &outputFormat) { 3032 int32_t width, height; 3033 if (!msg->findInt32("width", &width) 3034 || !msg->findInt32("height", &height)) { 3035 return INVALID_OPERATION; 3036 } 3037 3038 OMX_VIDEO_CODINGTYPE compressionFormat; 3039 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3040 3041 if (err != OK) { 3042 return err; 3043 } 3044 3045 err = setVideoPortFormatType( 3046 kPortIndexInput, 
compressionFormat, OMX_COLOR_FormatUnused); 3047 3048 if (err != OK) { 3049 return err; 3050 } 3051 3052 int32_t tmp; 3053 if (msg->findInt32("color-format", &tmp)) { 3054 OMX_COLOR_FORMATTYPE colorFormat = 3055 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3056 err = setVideoPortFormatType( 3057 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3058 if (err != OK) { 3059 ALOGW("[%s] does not support color format %d", 3060 mComponentName.c_str(), colorFormat); 3061 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3062 } 3063 } else { 3064 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3065 } 3066 3067 if (err != OK) { 3068 return err; 3069 } 3070 3071 int32_t frameRateInt; 3072 float frameRateFloat; 3073 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 3074 if (!msg->findInt32("frame-rate", &frameRateInt)) { 3075 frameRateInt = -1; 3076 } 3077 frameRateFloat = (float)frameRateInt; 3078 } 3079 3080 err = setVideoFormatOnPort( 3081 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 3082 3083 if (err != OK) { 3084 return err; 3085 } 3086 3087 err = setVideoFormatOnPort( 3088 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 3089 3090 if (err != OK) { 3091 return err; 3092 } 3093 3094 err = setColorAspectsForVideoDecoder( 3095 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat); 3096 if (err == ERROR_UNSUPPORTED) { // support is optional 3097 err = OK; 3098 } 3099 return err; 3100} 3101 3102status_t ACodec::initDescribeColorAspectsIndex() { 3103 status_t err = mOMX->getExtensionIndex( 3104 mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex); 3105 if (err != OK) { 3106 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0; 3107 } 3108 return err; 3109} 3110 3111status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) { 3112 status_t err = ERROR_UNSUPPORTED; 3113 if 
(mDescribeColorAspectsIndex) { 3114 err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3115 } 3116 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3117 mComponentName.c_str(), 3118 params.sAspects.mRange, asString(params.sAspects.mRange), 3119 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3120 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3121 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3122 err, asString(err)); 3123 3124 if (verify && err == OK) { 3125 err = getCodecColorAspects(params); 3126 } 3127 3128 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3129 "[%s] getting color aspects failed even though codec advertises support", 3130 mComponentName.c_str()); 3131 return err; 3132} 3133 3134status_t ACodec::setColorAspectsForVideoDecoder( 3135 int32_t width, int32_t height, bool usingNativeWindow, 3136 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3137 DescribeColorAspectsParams params; 3138 InitOMXParams(¶ms); 3139 params.nPortIndex = kPortIndexOutput; 3140 3141 getColorAspectsFromFormat(configFormat, params.sAspects); 3142 if (usingNativeWindow) { 3143 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3144 // The default aspects will be set back to the output format during the 3145 // getFormat phase of configure(). Set non-Unspecified values back into the 3146 // format, in case component does not support this enumeration. 
3147 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3148 } 3149 3150 (void)initDescribeColorAspectsIndex(); 3151 3152 // communicate color aspects to codec 3153 return setCodecColorAspects(params); 3154} 3155 3156status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams ¶ms) { 3157 status_t err = ERROR_UNSUPPORTED; 3158 if (mDescribeColorAspectsIndex) { 3159 err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3160 } 3161 ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3162 mComponentName.c_str(), 3163 params.sAspects.mRange, asString(params.sAspects.mRange), 3164 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3165 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3166 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3167 err, asString(err)); 3168 if (params.bRequestingDataSpace) { 3169 ALOGV("for dataspace %#x", params.nDataSpace); 3170 } 3171 if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex 3172 && !params.bRequestingDataSpace && !params.bDataSpaceChanged) { 3173 ALOGW("[%s] getting color aspects failed even though codec advertises support", 3174 mComponentName.c_str()); 3175 } 3176 return err; 3177} 3178 3179status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) { 3180 DescribeColorAspectsParams params; 3181 InitOMXParams(¶ms); 3182 params.nPortIndex = kPortIndexInput; 3183 status_t err = getCodecColorAspects(params); 3184 if (err == OK) { 3185 // we only set encoder input aspects if codec supports them 3186 setColorAspectsIntoFormat(params.sAspects, format, true /* force */); 3187 } 3188 return err; 3189} 3190 3191status_t ACodec::getDataSpace( 3192 DescribeColorAspectsParams ¶ms, android_dataspace *dataSpace /* nonnull */, 3193 bool tryCodec) { 3194 status_t err = OK; 3195 if (tryCodec) { 3196 // request dataspace guidance from codec. 
3197 params.bRequestingDataSpace = OMX_TRUE; 3198 err = getCodecColorAspects(params); 3199 params.bRequestingDataSpace = OMX_FALSE; 3200 if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) { 3201 *dataSpace = (android_dataspace)params.nDataSpace; 3202 return err; 3203 } else if (err == ERROR_UNSUPPORTED) { 3204 // ignore not-implemented error for dataspace requests 3205 err = OK; 3206 } 3207 } 3208 3209 // this returns legacy versions if available 3210 *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */); 3211 ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 3212 "and dataspace %#x", 3213 mComponentName.c_str(), 3214 params.sAspects.mRange, asString(params.sAspects.mRange), 3215 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3216 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3217 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3218 *dataSpace); 3219 return err; 3220} 3221 3222 3223status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder( 3224 int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, 3225 android_dataspace *dataSpace) { 3226 DescribeColorAspectsParams params; 3227 InitOMXParams(¶ms); 3228 params.nPortIndex = kPortIndexOutput; 3229 3230 // reset default format and get resulting format 3231 getColorAspectsFromFormat(configFormat, params.sAspects); 3232 if (dataSpace != NULL) { 3233 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3234 } 3235 status_t err = setCodecColorAspects(params, true /* readBack */); 3236 3237 // we always set specified aspects for decoders 3238 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3239 3240 if (dataSpace != NULL) { 3241 status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */); 3242 if (err == OK) { 3243 err = res; 3244 } 3245 } 3246 3247 return err; 3248} 3249 3250// initial video encoder setup for bytebuffer mode 
3251status_t ACodec::setColorAspectsForVideoEncoder( 3252 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3253 // copy config to output format as this is not exposed via getFormat 3254 copyColorConfig(configFormat, outputFormat); 3255 3256 DescribeColorAspectsParams params; 3257 InitOMXParams(¶ms); 3258 params.nPortIndex = kPortIndexInput; 3259 getColorAspectsFromFormat(configFormat, params.sAspects); 3260 3261 (void)initDescribeColorAspectsIndex(); 3262 3263 int32_t usingRecorder; 3264 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3265 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3266 int32_t width, height; 3267 if (configFormat->findInt32("width", &width) 3268 && configFormat->findInt32("height", &height)) { 3269 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3270 status_t err = getDataSpace( 3271 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3272 if (err != OK) { 3273 return err; 3274 } 3275 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3276 } 3277 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3278 } 3279 3280 // communicate color aspects to codec, but do not allow change of the platform aspects 3281 ColorAspects origAspects = params.sAspects; 3282 for (int triesLeft = 2; --triesLeft >= 0; ) { 3283 status_t err = setCodecColorAspects(params, true /* readBack */); 3284 if (err != OK 3285 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3286 params.sAspects, origAspects, true /* usePlatformAspects */)) { 3287 return err; 3288 } 3289 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3290 mComponentName.c_str()); 3291 } 3292 return OK; 3293} 3294 3295// subsequent initial video encoder setup for surface mode 3296status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3297 android_dataspace *dataSpace /* nonnull */) { 3298 
DescribeColorAspectsParams params; 3299 InitOMXParams(¶ms); 3300 params.nPortIndex = kPortIndexInput; 3301 ColorAspects &aspects = params.sAspects; 3302 3303 // reset default format and store resulting format into both input and output formats 3304 getColorAspectsFromFormat(mConfigFormat, aspects); 3305 int32_t width, height; 3306 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3307 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3308 } 3309 setColorAspectsIntoFormat(aspects, mInputFormat); 3310 setColorAspectsIntoFormat(aspects, mOutputFormat); 3311 3312 // communicate color aspects to codec, but do not allow any change 3313 ColorAspects origAspects = aspects; 3314 status_t err = OK; 3315 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3316 status_t err = setCodecColorAspects(params, true /* readBack */); 3317 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3318 break; 3319 } 3320 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3321 mComponentName.c_str()); 3322 } 3323 3324 *dataSpace = HAL_DATASPACE_BT709; 3325 aspects = origAspects; // restore desired color aspects 3326 status_t res = getDataSpace( 3327 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3328 if (err == OK) { 3329 err = res; 3330 } 3331 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace); 3332 mInputFormat->setBuffer( 3333 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects))); 3334 3335 // update input format with codec supported color aspects (basically set unsupported 3336 // aspects to Unspecified) 3337 if (err == OK) { 3338 (void)getInputColorAspectsForVideoEncoder(mInputFormat); 3339 } 3340 3341 ALOGV("set default color aspects, updated input format to %s, output format to %s", 3342 mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str()); 3343 3344 
return err; 3345} 3346 3347status_t ACodec::setupVideoEncoder( 3348 const char *mime, const sp<AMessage> &msg, 3349 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3350 int32_t tmp; 3351 if (!msg->findInt32("color-format", &tmp)) { 3352 return INVALID_OPERATION; 3353 } 3354 3355 OMX_COLOR_FORMATTYPE colorFormat = 3356 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3357 3358 status_t err = setVideoPortFormatType( 3359 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3360 3361 if (err != OK) { 3362 ALOGE("[%s] does not support color format %d", 3363 mComponentName.c_str(), colorFormat); 3364 3365 return err; 3366 } 3367 3368 /* Input port configuration */ 3369 3370 OMX_PARAM_PORTDEFINITIONTYPE def; 3371 InitOMXParams(&def); 3372 3373 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3374 3375 def.nPortIndex = kPortIndexInput; 3376 3377 err = mOMX->getParameter( 3378 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3379 3380 if (err != OK) { 3381 return err; 3382 } 3383 3384 int32_t width, height, bitrate; 3385 if (!msg->findInt32("width", &width) 3386 || !msg->findInt32("height", &height) 3387 || !msg->findInt32("bitrate", &bitrate)) { 3388 return INVALID_OPERATION; 3389 } 3390 3391 video_def->nFrameWidth = width; 3392 video_def->nFrameHeight = height; 3393 3394 int32_t stride; 3395 if (!msg->findInt32("stride", &stride)) { 3396 stride = width; 3397 } 3398 3399 video_def->nStride = stride; 3400 3401 int32_t sliceHeight; 3402 if (!msg->findInt32("slice-height", &sliceHeight)) { 3403 sliceHeight = height; 3404 } 3405 3406 video_def->nSliceHeight = sliceHeight; 3407 3408 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3409 3410 float frameRate; 3411 if (!msg->findFloat("frame-rate", &frameRate)) { 3412 int32_t tmp; 3413 if (!msg->findInt32("frame-rate", &tmp)) { 3414 return INVALID_OPERATION; 3415 } 3416 frameRate = (float)tmp; 3417 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3418 } 3419 3420 
video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3421 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3422 // this is redundant as it was already set up in setVideoPortFormatType 3423 // FIXME for now skip this only for flexible YUV formats 3424 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3425 video_def->eColorFormat = colorFormat; 3426 } 3427 3428 err = mOMX->setParameter( 3429 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3430 3431 if (err != OK) { 3432 ALOGE("[%s] failed to set input port definition parameters.", 3433 mComponentName.c_str()); 3434 3435 return err; 3436 } 3437 3438 /* Output port configuration */ 3439 3440 OMX_VIDEO_CODINGTYPE compressionFormat; 3441 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3442 3443 if (err != OK) { 3444 return err; 3445 } 3446 3447 err = setVideoPortFormatType( 3448 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3449 3450 if (err != OK) { 3451 ALOGE("[%s] does not support compression format %d", 3452 mComponentName.c_str(), compressionFormat); 3453 3454 return err; 3455 } 3456 3457 def.nPortIndex = kPortIndexOutput; 3458 3459 err = mOMX->getParameter( 3460 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3461 3462 if (err != OK) { 3463 return err; 3464 } 3465 3466 video_def->nFrameWidth = width; 3467 video_def->nFrameHeight = height; 3468 video_def->xFramerate = 0; 3469 video_def->nBitrate = bitrate; 3470 video_def->eCompressionFormat = compressionFormat; 3471 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3472 3473 err = mOMX->setParameter( 3474 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3475 3476 if (err != OK) { 3477 ALOGE("[%s] failed to set output port definition parameters.", 3478 mComponentName.c_str()); 3479 3480 return err; 3481 } 3482 3483 int32_t intraRefreshPeriod = 0; 3484 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3485 && intraRefreshPeriod >= 0) { 3486 err = 
setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3487 if (err != OK) { 3488 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 3489 mComponentName.c_str()); 3490 err = OK; 3491 } 3492 } 3493 3494 switch (compressionFormat) { 3495 case OMX_VIDEO_CodingMPEG4: 3496 err = setupMPEG4EncoderParameters(msg); 3497 break; 3498 3499 case OMX_VIDEO_CodingH263: 3500 err = setupH263EncoderParameters(msg); 3501 break; 3502 3503 case OMX_VIDEO_CodingAVC: 3504 err = setupAVCEncoderParameters(msg); 3505 break; 3506 3507 case OMX_VIDEO_CodingHEVC: 3508 err = setupHEVCEncoderParameters(msg); 3509 break; 3510 3511 case OMX_VIDEO_CodingVP8: 3512 case OMX_VIDEO_CodingVP9: 3513 err = setupVPXEncoderParameters(msg); 3514 break; 3515 3516 default: 3517 break; 3518 } 3519 3520 // Set up color aspects on input, but propagate them to the output format, as they will 3521 // not be read back from encoder. 3522 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3523 if (err == ERROR_UNSUPPORTED) { 3524 ALOGI("[%s] cannot encode color aspects. 
Ignoring.", mComponentName.c_str()); 3525 err = OK; 3526 } 3527 3528 if (err == OK) { 3529 ALOGI("setupVideoEncoder succeeded"); 3530 } 3531 3532 return err; 3533} 3534 3535status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3536 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3537 InitOMXParams(¶ms); 3538 params.nPortIndex = kPortIndexOutput; 3539 3540 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3541 3542 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3543 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3544 int32_t mbs; 3545 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3546 return INVALID_OPERATION; 3547 } 3548 params.nCirMBs = mbs; 3549 } 3550 3551 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3552 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3553 int32_t mbs; 3554 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3555 return INVALID_OPERATION; 3556 } 3557 params.nAirMBs = mbs; 3558 3559 int32_t ref; 3560 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3561 return INVALID_OPERATION; 3562 } 3563 params.nAirRef = ref; 3564 } 3565 3566 status_t err = mOMX->setParameter( 3567 mNode, OMX_IndexParamVideoIntraRefresh, 3568 ¶ms, sizeof(params)); 3569 return err; 3570} 3571 3572static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) { 3573 if (iFramesInterval < 0) { 3574 return 0xFFFFFFFF; 3575 } else if (iFramesInterval == 0) { 3576 return 0; 3577 } 3578 OMX_U32 ret = frameRate * iFramesInterval; 3579 return ret; 3580} 3581 3582static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 3583 int32_t tmp; 3584 if (!msg->findInt32("bitrate-mode", &tmp)) { 3585 return OMX_Video_ControlRateVariable; 3586 } 3587 3588 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 3589} 3590 3591status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 3592 int32_t bitrate, iFrameInterval; 3593 if (!msg->findInt32("bitrate", 
&bitrate) 3594 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3595 return INVALID_OPERATION; 3596 } 3597 3598 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3599 3600 float frameRate; 3601 if (!msg->findFloat("frame-rate", &frameRate)) { 3602 int32_t tmp; 3603 if (!msg->findInt32("frame-rate", &tmp)) { 3604 return INVALID_OPERATION; 3605 } 3606 frameRate = (float)tmp; 3607 } 3608 3609 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 3610 InitOMXParams(&mpeg4type); 3611 mpeg4type.nPortIndex = kPortIndexOutput; 3612 3613 status_t err = mOMX->getParameter( 3614 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3615 3616 if (err != OK) { 3617 return err; 3618 } 3619 3620 mpeg4type.nSliceHeaderSpacing = 0; 3621 mpeg4type.bSVH = OMX_FALSE; 3622 mpeg4type.bGov = OMX_FALSE; 3623 3624 mpeg4type.nAllowedPictureTypes = 3625 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3626 3627 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3628 if (mpeg4type.nPFrames == 0) { 3629 mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3630 } 3631 mpeg4type.nBFrames = 0; 3632 mpeg4type.nIDCVLCThreshold = 0; 3633 mpeg4type.bACPred = OMX_TRUE; 3634 mpeg4type.nMaxPacketSize = 256; 3635 mpeg4type.nTimeIncRes = 1000; 3636 mpeg4type.nHeaderExtension = 0; 3637 mpeg4type.bReversibleVLC = OMX_FALSE; 3638 3639 int32_t profile; 3640 if (msg->findInt32("profile", &profile)) { 3641 int32_t level; 3642 if (!msg->findInt32("level", &level)) { 3643 return INVALID_OPERATION; 3644 } 3645 3646 err = verifySupportForProfileAndLevel(profile, level); 3647 3648 if (err != OK) { 3649 return err; 3650 } 3651 3652 mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile); 3653 mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level); 3654 } 3655 3656 err = mOMX->setParameter( 3657 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3658 3659 if (err != OK) { 3660 return err; 3661 } 3662 3663 err = configureBitrate(bitrate, 
bitrateMode); 3664 3665 if (err != OK) { 3666 return err; 3667 } 3668 3669 return setupErrorCorrectionParameters(); 3670} 3671 3672status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) { 3673 int32_t bitrate, iFrameInterval; 3674 if (!msg->findInt32("bitrate", &bitrate) 3675 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3676 return INVALID_OPERATION; 3677 } 3678 3679 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3680 3681 float frameRate; 3682 if (!msg->findFloat("frame-rate", &frameRate)) { 3683 int32_t tmp; 3684 if (!msg->findInt32("frame-rate", &tmp)) { 3685 return INVALID_OPERATION; 3686 } 3687 frameRate = (float)tmp; 3688 } 3689 3690 OMX_VIDEO_PARAM_H263TYPE h263type; 3691 InitOMXParams(&h263type); 3692 h263type.nPortIndex = kPortIndexOutput; 3693 3694 status_t err = mOMX->getParameter( 3695 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 3696 3697 if (err != OK) { 3698 return err; 3699 } 3700 3701 h263type.nAllowedPictureTypes = 3702 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3703 3704 h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3705 if (h263type.nPFrames == 0) { 3706 h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3707 } 3708 h263type.nBFrames = 0; 3709 3710 int32_t profile; 3711 if (msg->findInt32("profile", &profile)) { 3712 int32_t level; 3713 if (!msg->findInt32("level", &level)) { 3714 return INVALID_OPERATION; 3715 } 3716 3717 err = verifySupportForProfileAndLevel(profile, level); 3718 3719 if (err != OK) { 3720 return err; 3721 } 3722 3723 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 3724 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 3725 } 3726 3727 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 3728 h263type.bForceRoundingTypeToZero = OMX_FALSE; 3729 h263type.nPictureHeaderRepetition = 0; 3730 h263type.nGOBHeaderInterval = 0; 3731 3732 err = mOMX->setParameter( 3733 mNode, OMX_IndexParamVideoH263, &h263type, 
sizeof(h263type)); 3734 3735 if (err != OK) { 3736 return err; 3737 } 3738 3739 err = configureBitrate(bitrate, bitrateMode); 3740 3741 if (err != OK) { 3742 return err; 3743 } 3744 3745 return setupErrorCorrectionParameters(); 3746} 3747 3748// static 3749int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor( 3750 int width, int height, int rate, int bitrate, 3751 OMX_VIDEO_AVCPROFILETYPE profile) { 3752 // convert bitrate to main/baseline profile kbps equivalent 3753 switch (profile) { 3754 case OMX_VIDEO_AVCProfileHigh10: 3755 bitrate = divUp(bitrate, 3000); break; 3756 case OMX_VIDEO_AVCProfileHigh: 3757 bitrate = divUp(bitrate, 1250); break; 3758 default: 3759 bitrate = divUp(bitrate, 1000); break; 3760 } 3761 3762 // convert size and rate to MBs 3763 width = divUp(width, 16); 3764 height = divUp(height, 16); 3765 int mbs = width * height; 3766 rate *= mbs; 3767 int maxDimension = max(width, height); 3768 3769 static const int limits[][5] = { 3770 /* MBps MB dim bitrate level */ 3771 { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 }, 3772 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b }, 3773 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 }, 3774 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 }, 3775 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 }, 3776 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 }, 3777 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 }, 3778 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 }, 3779 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 }, 3780 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 3781 { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 3782 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 3783 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 3784 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 3785 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 3786 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 3787 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 3788 }; 3789 3790 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 
3791 const int (&limit)[5] = limits[i]; 3792 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 3793 && bitrate <= limit[3]) { 3794 return limit[4]; 3795 } 3796 } 3797 return 0; 3798} 3799 3800status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 3801 int32_t bitrate, iFrameInterval; 3802 if (!msg->findInt32("bitrate", &bitrate) 3803 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3804 return INVALID_OPERATION; 3805 } 3806 3807 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3808 3809 float frameRate; 3810 if (!msg->findFloat("frame-rate", &frameRate)) { 3811 int32_t tmp; 3812 if (!msg->findInt32("frame-rate", &tmp)) { 3813 return INVALID_OPERATION; 3814 } 3815 frameRate = (float)tmp; 3816 } 3817 3818 status_t err = OK; 3819 int32_t intraRefreshMode = 0; 3820 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 3821 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 3822 if (err != OK) { 3823 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 3824 err, intraRefreshMode); 3825 return err; 3826 } 3827 } 3828 3829 OMX_VIDEO_PARAM_AVCTYPE h264type; 3830 InitOMXParams(&h264type); 3831 h264type.nPortIndex = kPortIndexOutput; 3832 3833 err = mOMX->getParameter( 3834 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 3835 3836 if (err != OK) { 3837 return err; 3838 } 3839 3840 h264type.nAllowedPictureTypes = 3841 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3842 3843 int32_t profile; 3844 if (msg->findInt32("profile", &profile)) { 3845 int32_t level; 3846 if (!msg->findInt32("level", &level)) { 3847 return INVALID_OPERATION; 3848 } 3849 3850 err = verifySupportForProfileAndLevel(profile, level); 3851 3852 if (err != OK) { 3853 return err; 3854 } 3855 3856 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 3857 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 3858 } 3859 3860 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 3861 
h264type.nSliceHeaderSpacing = 0; 3862 h264type.bUseHadamard = OMX_TRUE; 3863 h264type.nRefFrames = 1; 3864 h264type.nBFrames = 0; 3865 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3866 if (h264type.nPFrames == 0) { 3867 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3868 } 3869 h264type.nRefIdx10ActiveMinus1 = 0; 3870 h264type.nRefIdx11ActiveMinus1 = 0; 3871 h264type.bEntropyCodingCABAC = OMX_FALSE; 3872 h264type.bWeightedPPrediction = OMX_FALSE; 3873 h264type.bconstIpred = OMX_FALSE; 3874 h264type.bDirect8x8Inference = OMX_FALSE; 3875 h264type.bDirectSpatialTemporal = OMX_FALSE; 3876 h264type.nCabacInitIdc = 0; 3877 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 3878 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 3879 h264type.nSliceHeaderSpacing = 0; 3880 h264type.bUseHadamard = OMX_TRUE; 3881 h264type.nRefFrames = 2; 3882 h264type.nBFrames = 1; 3883 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3884 h264type.nAllowedPictureTypes = 3885 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 3886 h264type.nRefIdx10ActiveMinus1 = 0; 3887 h264type.nRefIdx11ActiveMinus1 = 0; 3888 h264type.bEntropyCodingCABAC = OMX_TRUE; 3889 h264type.bWeightedPPrediction = OMX_TRUE; 3890 h264type.bconstIpred = OMX_TRUE; 3891 h264type.bDirect8x8Inference = OMX_TRUE; 3892 h264type.bDirectSpatialTemporal = OMX_TRUE; 3893 h264type.nCabacInitIdc = 1; 3894 } 3895 3896 if (h264type.nBFrames != 0) { 3897 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 3898 } 3899 3900 h264type.bEnableUEP = OMX_FALSE; 3901 h264type.bEnableFMO = OMX_FALSE; 3902 h264type.bEnableASO = OMX_FALSE; 3903 h264type.bEnableRS = OMX_FALSE; 3904 h264type.bFrameMBsOnly = OMX_TRUE; 3905 h264type.bMBAFF = OMX_FALSE; 3906 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 3907 3908 err = mOMX->setParameter( 3909 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 3910 3911 if (err != OK) { 3912 return err; 3913 } 
3914 3915 return configureBitrate(bitrate, bitrateMode); 3916} 3917 3918status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 3919 int32_t bitrate, iFrameInterval; 3920 if (!msg->findInt32("bitrate", &bitrate) 3921 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3922 return INVALID_OPERATION; 3923 } 3924 3925 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3926 3927 float frameRate; 3928 if (!msg->findFloat("frame-rate", &frameRate)) { 3929 int32_t tmp; 3930 if (!msg->findInt32("frame-rate", &tmp)) { 3931 return INVALID_OPERATION; 3932 } 3933 frameRate = (float)tmp; 3934 } 3935 3936 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 3937 InitOMXParams(&hevcType); 3938 hevcType.nPortIndex = kPortIndexOutput; 3939 3940 status_t err = OK; 3941 err = mOMX->getParameter( 3942 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 3943 if (err != OK) { 3944 return err; 3945 } 3946 3947 int32_t profile; 3948 if (msg->findInt32("profile", &profile)) { 3949 int32_t level; 3950 if (!msg->findInt32("level", &level)) { 3951 return INVALID_OPERATION; 3952 } 3953 3954 err = verifySupportForProfileAndLevel(profile, level); 3955 if (err != OK) { 3956 return err; 3957 } 3958 3959 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 3960 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 3961 } 3962 // TODO: finer control? 
3963 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 3964 3965 err = mOMX->setParameter( 3966 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 3967 if (err != OK) { 3968 return err; 3969 } 3970 3971 return configureBitrate(bitrate, bitrateMode); 3972} 3973 3974status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 3975 int32_t bitrate; 3976 int32_t iFrameInterval = 0; 3977 size_t tsLayers = 0; 3978 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 3979 OMX_VIDEO_VPXTemporalLayerPatternNone; 3980 static const uint32_t kVp8LayerRateAlloction 3981 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 3982 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 3983 {100, 100, 100}, // 1 layer 3984 { 60, 100, 100}, // 2 layers {60%, 40%} 3985 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 3986 }; 3987 if (!msg->findInt32("bitrate", &bitrate)) { 3988 return INVALID_OPERATION; 3989 } 3990 msg->findInt32("i-frame-interval", &iFrameInterval); 3991 3992 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3993 3994 float frameRate; 3995 if (!msg->findFloat("frame-rate", &frameRate)) { 3996 int32_t tmp; 3997 if (!msg->findInt32("frame-rate", &tmp)) { 3998 return INVALID_OPERATION; 3999 } 4000 frameRate = (float)tmp; 4001 } 4002 4003 AString tsSchema; 4004 if (msg->findString("ts-schema", &tsSchema)) { 4005 if (tsSchema == "webrtc.vp8.1-layer") { 4006 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4007 tsLayers = 1; 4008 } else if (tsSchema == "webrtc.vp8.2-layer") { 4009 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4010 tsLayers = 2; 4011 } else if (tsSchema == "webrtc.vp8.3-layer") { 4012 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4013 tsLayers = 3; 4014 } else { 4015 ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str()); 4016 } 4017 } 4018 4019 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4020 InitOMXParams(&vp8type); 4021 vp8type.nPortIndex = kPortIndexOutput; 4022 status_t err = 
mOMX->getParameter( 4023 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4024 &vp8type, sizeof(vp8type)); 4025 4026 if (err == OK) { 4027 if (iFrameInterval > 0) { 4028 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4029 } 4030 vp8type.eTemporalPattern = pattern; 4031 vp8type.nTemporalLayerCount = tsLayers; 4032 if (tsLayers > 0) { 4033 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4034 vp8type.nTemporalLayerBitrateRatio[i] = 4035 kVp8LayerRateAlloction[tsLayers - 1][i]; 4036 } 4037 } 4038 if (bitrateMode == OMX_Video_ControlRateConstant) { 4039 vp8type.nMinQuantizer = 2; 4040 vp8type.nMaxQuantizer = 63; 4041 } 4042 4043 err = mOMX->setParameter( 4044 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4045 &vp8type, sizeof(vp8type)); 4046 if (err != OK) { 4047 ALOGW("Extended VP8 parameters set failed: %d", err); 4048 } 4049 } 4050 4051 return configureBitrate(bitrate, bitrateMode); 4052} 4053 4054status_t ACodec::verifySupportForProfileAndLevel( 4055 int32_t profile, int32_t level) { 4056 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4057 InitOMXParams(¶ms); 4058 params.nPortIndex = kPortIndexOutput; 4059 4060 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4061 params.nProfileIndex = index; 4062 status_t err = mOMX->getParameter( 4063 mNode, 4064 OMX_IndexParamVideoProfileLevelQuerySupported, 4065 ¶ms, 4066 sizeof(params)); 4067 4068 if (err != OK) { 4069 return err; 4070 } 4071 4072 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4073 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4074 4075 if (profile == supportedProfile && level <= supportedLevel) { 4076 return OK; 4077 } 4078 4079 if (index == kMaxIndicesToCheck) { 4080 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4081 mComponentName.c_str(), index, 4082 params.eProfile, params.eLevel); 4083 } 4084 } 4085 return ERROR_UNSUPPORTED; 4086} 4087 4088status_t ACodec::configureBitrate( 
4089 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 4090 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 4091 InitOMXParams(&bitrateType); 4092 bitrateType.nPortIndex = kPortIndexOutput; 4093 4094 status_t err = mOMX->getParameter( 4095 mNode, OMX_IndexParamVideoBitrate, 4096 &bitrateType, sizeof(bitrateType)); 4097 4098 if (err != OK) { 4099 return err; 4100 } 4101 4102 bitrateType.eControlRate = bitrateMode; 4103 bitrateType.nTargetBitrate = bitrate; 4104 4105 return mOMX->setParameter( 4106 mNode, OMX_IndexParamVideoBitrate, 4107 &bitrateType, sizeof(bitrateType)); 4108} 4109 4110status_t ACodec::setupErrorCorrectionParameters() { 4111 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 4112 InitOMXParams(&errorCorrectionType); 4113 errorCorrectionType.nPortIndex = kPortIndexOutput; 4114 4115 status_t err = mOMX->getParameter( 4116 mNode, OMX_IndexParamVideoErrorCorrection, 4117 &errorCorrectionType, sizeof(errorCorrectionType)); 4118 4119 if (err != OK) { 4120 return OK; // Optional feature. 
Ignore this failure 4121 } 4122 4123 errorCorrectionType.bEnableHEC = OMX_FALSE; 4124 errorCorrectionType.bEnableResync = OMX_TRUE; 4125 errorCorrectionType.nResynchMarkerSpacing = 256; 4126 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 4127 errorCorrectionType.bEnableRVLC = OMX_FALSE; 4128 4129 return mOMX->setParameter( 4130 mNode, OMX_IndexParamVideoErrorCorrection, 4131 &errorCorrectionType, sizeof(errorCorrectionType)); 4132} 4133 4134status_t ACodec::setVideoFormatOnPort( 4135 OMX_U32 portIndex, 4136 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 4137 float frameRate) { 4138 OMX_PARAM_PORTDEFINITIONTYPE def; 4139 InitOMXParams(&def); 4140 def.nPortIndex = portIndex; 4141 4142 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 4143 4144 status_t err = mOMX->getParameter( 4145 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4146 if (err != OK) { 4147 return err; 4148 } 4149 4150 if (portIndex == kPortIndexInput) { 4151 // XXX Need a (much) better heuristic to compute input buffer sizes. 
4152 const size_t X = 64 * 1024; 4153 if (def.nBufferSize < X) { 4154 def.nBufferSize = X; 4155 } 4156 } 4157 4158 if (def.eDomain != OMX_PortDomainVideo) { 4159 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); 4160 return FAILED_TRANSACTION; 4161 } 4162 4163 video_def->nFrameWidth = width; 4164 video_def->nFrameHeight = height; 4165 4166 if (portIndex == kPortIndexInput) { 4167 video_def->eCompressionFormat = compressionFormat; 4168 video_def->eColorFormat = OMX_COLOR_FormatUnused; 4169 if (frameRate >= 0) { 4170 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 4171 } 4172 } 4173 4174 err = mOMX->setParameter( 4175 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4176 4177 return err; 4178} 4179 4180status_t ACodec::initNativeWindow() { 4181 if (mNativeWindow != NULL) { 4182 return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE); 4183 } 4184 4185 mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 4186 return OK; 4187} 4188 4189size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 4190 size_t n = 0; 4191 4192 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4193 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 4194 4195 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4196 ++n; 4197 } 4198 } 4199 4200 return n; 4201} 4202 4203size_t ACodec::countBuffersOwnedByNativeWindow() const { 4204 size_t n = 0; 4205 4206 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 4207 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 4208 4209 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4210 ++n; 4211 } 4212 } 4213 4214 return n; 4215} 4216 4217void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 4218 if (mNativeWindow == NULL) { 4219 return; 4220 } 4221 4222 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 4223 && dequeueBufferFromNativeWindow() != NULL) { 
4224 // these buffers will be submitted as regular buffers; account for this 4225 if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) { 4226 --mMetadataBuffersToSubmit; 4227 } 4228 } 4229} 4230 4231bool ACodec::allYourBuffersAreBelongToUs( 4232 OMX_U32 portIndex) { 4233 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4234 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 4235 4236 if (info->mStatus != BufferInfo::OWNED_BY_US 4237 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4238 ALOGV("[%s] Buffer %u on port %u still has status %d", 4239 mComponentName.c_str(), 4240 info->mBufferID, portIndex, info->mStatus); 4241 return false; 4242 } 4243 } 4244 4245 return true; 4246} 4247 4248bool ACodec::allYourBuffersAreBelongToUs() { 4249 return allYourBuffersAreBelongToUs(kPortIndexInput) 4250 && allYourBuffersAreBelongToUs(kPortIndexOutput); 4251} 4252 4253void ACodec::deferMessage(const sp<AMessage> &msg) { 4254 mDeferredQueue.push_back(msg); 4255} 4256 4257void ACodec::processDeferredMessages() { 4258 List<sp<AMessage> > queue = mDeferredQueue; 4259 mDeferredQueue.clear(); 4260 4261 List<sp<AMessage> >::iterator it = queue.begin(); 4262 while (it != queue.end()) { 4263 onMessageReceived(*it++); 4264 } 4265} 4266 4267// static 4268bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params ¶ms) { 4269 MediaImage2 &image = params.sMediaImage; 4270 memset(&image, 0, sizeof(image)); 4271 4272 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4273 image.mNumPlanes = 0; 4274 4275 const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat; 4276 image.mWidth = params.nFrameWidth; 4277 image.mHeight = params.nFrameHeight; 4278 4279 // only supporting YUV420 4280 if (fmt != OMX_COLOR_FormatYUV420Planar && 4281 fmt != OMX_COLOR_FormatYUV420PackedPlanar && 4282 fmt != OMX_COLOR_FormatYUV420SemiPlanar && 4283 fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar && 4284 fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) { 4285 ALOGW("do not 
know color format 0x%x = %d", fmt, fmt); 4286 return false; 4287 } 4288 4289 // TEMPORARY FIX for some vendors that advertise sliceHeight as 0 4290 if (params.nStride != 0 && params.nSliceHeight == 0) { 4291 ALOGW("using sliceHeight=%u instead of what codec advertised (=0)", 4292 params.nFrameHeight); 4293 params.nSliceHeight = params.nFrameHeight; 4294 } 4295 4296 // we need stride and slice-height to be non-zero and sensible. These values were chosen to 4297 // prevent integer overflows further down the line, and do not indicate support for 4298 // 32kx32k video. 4299 if (params.nStride == 0 || params.nSliceHeight == 0 4300 || params.nStride > 32768 || params.nSliceHeight > 32768) { 4301 ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u", 4302 fmt, fmt, params.nStride, params.nSliceHeight); 4303 return false; 4304 } 4305 4306 // set-up YUV format 4307 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV; 4308 image.mNumPlanes = 3; 4309 image.mBitDepth = 8; 4310 image.mBitDepthAllocated = 8; 4311 image.mPlane[image.Y].mOffset = 0; 4312 image.mPlane[image.Y].mColInc = 1; 4313 image.mPlane[image.Y].mRowInc = params.nStride; 4314 image.mPlane[image.Y].mHorizSubsampling = 1; 4315 image.mPlane[image.Y].mVertSubsampling = 1; 4316 4317 switch ((int)fmt) { 4318 case HAL_PIXEL_FORMAT_YV12: 4319 if (params.bUsingNativeBuffers) { 4320 size_t ystride = align(params.nStride, 16); 4321 size_t cstride = align(params.nStride / 2, 16); 4322 image.mPlane[image.Y].mRowInc = ystride; 4323 4324 image.mPlane[image.V].mOffset = ystride * params.nSliceHeight; 4325 image.mPlane[image.V].mColInc = 1; 4326 image.mPlane[image.V].mRowInc = cstride; 4327 image.mPlane[image.V].mHorizSubsampling = 2; 4328 image.mPlane[image.V].mVertSubsampling = 2; 4329 4330 image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset 4331 + (cstride * params.nSliceHeight / 2); 4332 image.mPlane[image.U].mColInc = 1; 4333 image.mPlane[image.U].mRowInc = cstride; 4334 
image.mPlane[image.U].mHorizSubsampling = 2; 4335 image.mPlane[image.U].mVertSubsampling = 2; 4336 break; 4337 } else { 4338 // fall through as YV12 is used for YUV420Planar by some codecs 4339 } 4340 4341 case OMX_COLOR_FormatYUV420Planar: 4342 case OMX_COLOR_FormatYUV420PackedPlanar: 4343 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4344 image.mPlane[image.U].mColInc = 1; 4345 image.mPlane[image.U].mRowInc = params.nStride / 2; 4346 image.mPlane[image.U].mHorizSubsampling = 2; 4347 image.mPlane[image.U].mVertSubsampling = 2; 4348 4349 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset 4350 + (params.nStride * params.nSliceHeight / 4); 4351 image.mPlane[image.V].mColInc = 1; 4352 image.mPlane[image.V].mRowInc = params.nStride / 2; 4353 image.mPlane[image.V].mHorizSubsampling = 2; 4354 image.mPlane[image.V].mVertSubsampling = 2; 4355 break; 4356 4357 case OMX_COLOR_FormatYUV420SemiPlanar: 4358 // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder 4359 case OMX_COLOR_FormatYUV420PackedSemiPlanar: 4360 // NV12 4361 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4362 image.mPlane[image.U].mColInc = 2; 4363 image.mPlane[image.U].mRowInc = params.nStride; 4364 image.mPlane[image.U].mHorizSubsampling = 2; 4365 image.mPlane[image.U].mVertSubsampling = 2; 4366 4367 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1; 4368 image.mPlane[image.V].mColInc = 2; 4369 image.mPlane[image.V].mRowInc = params.nStride; 4370 image.mPlane[image.V].mHorizSubsampling = 2; 4371 image.mPlane[image.V].mVertSubsampling = 2; 4372 break; 4373 4374 default: 4375 TRESPASS(); 4376 } 4377 return true; 4378} 4379 4380// static 4381bool ACodec::describeColorFormat( 4382 const sp<IOMX> &omx, IOMX::node_id node, 4383 DescribeColorFormat2Params &describeParams) 4384{ 4385 OMX_INDEXTYPE describeColorFormatIndex; 4386 if (omx->getExtensionIndex( 4387 node, "OMX.google.android.index.describeColorFormat", 4388 
&describeColorFormatIndex) == OK) { 4389 DescribeColorFormatParams describeParamsV1(describeParams); 4390 if (omx->getParameter( 4391 node, describeColorFormatIndex, 4392 &describeParamsV1, sizeof(describeParamsV1)) == OK) { 4393 describeParams.initFromV1(describeParamsV1); 4394 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4395 } 4396 } else if (omx->getExtensionIndex( 4397 node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK 4398 && omx->getParameter( 4399 node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) { 4400 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4401 } 4402 4403 return describeDefaultColorFormat(describeParams); 4404} 4405 4406// static 4407bool ACodec::isFlexibleColorFormat( 4408 const sp<IOMX> &omx, IOMX::node_id node, 4409 uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) { 4410 DescribeColorFormat2Params describeParams; 4411 InitOMXParams(&describeParams); 4412 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; 4413 // reasonable dummy values 4414 describeParams.nFrameWidth = 128; 4415 describeParams.nFrameHeight = 128; 4416 describeParams.nStride = 128; 4417 describeParams.nSliceHeight = 128; 4418 describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers; 4419 4420 CHECK(flexibleEquivalent != NULL); 4421 4422 if (!describeColorFormat(omx, node, describeParams)) { 4423 return false; 4424 } 4425 4426 const MediaImage2 &img = describeParams.sMediaImage; 4427 if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) { 4428 if (img.mNumPlanes != 3 4429 || img.mPlane[img.Y].mHorizSubsampling != 1 4430 || img.mPlane[img.Y].mVertSubsampling != 1) { 4431 return false; 4432 } 4433 4434 // YUV 420 4435 if (img.mPlane[img.U].mHorizSubsampling == 2 4436 && img.mPlane[img.U].mVertSubsampling == 2 4437 && img.mPlane[img.V].mHorizSubsampling == 2 4438 && 
img.mPlane[img.V].mVertSubsampling == 2) { 4439 // possible flexible YUV420 format 4440 if (img.mBitDepth <= 8) { 4441 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible; 4442 return true; 4443 } 4444 } 4445 } 4446 return false; 4447} 4448 4449status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4450 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4451 OMX_PARAM_PORTDEFINITIONTYPE def; 4452 InitOMXParams(&def); 4453 def.nPortIndex = portIndex; 4454 4455 status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4456 if (err != OK) { 4457 return err; 4458 } 4459 4460 if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) { 4461 ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex); 4462 return BAD_VALUE; 4463 } 4464 4465 switch (def.eDomain) { 4466 case OMX_PortDomainVideo: 4467 { 4468 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 4469 switch ((int)videoDef->eCompressionFormat) { 4470 case OMX_VIDEO_CodingUnused: 4471 { 4472 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 4473 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 4474 4475 notify->setInt32("stride", videoDef->nStride); 4476 notify->setInt32("slice-height", videoDef->nSliceHeight); 4477 notify->setInt32("color-format", videoDef->eColorFormat); 4478 4479 if (mNativeWindow == NULL) { 4480 DescribeColorFormat2Params describeParams; 4481 InitOMXParams(&describeParams); 4482 describeParams.eColorFormat = videoDef->eColorFormat; 4483 describeParams.nFrameWidth = videoDef->nFrameWidth; 4484 describeParams.nFrameHeight = videoDef->nFrameHeight; 4485 describeParams.nStride = videoDef->nStride; 4486 describeParams.nSliceHeight = videoDef->nSliceHeight; 4487 describeParams.bUsingNativeBuffers = OMX_FALSE; 4488 4489 if (describeColorFormat(mOMX, mNode, describeParams)) { 4490 notify->setBuffer( 4491 "image-data", 4492 ABuffer::CreateAsCopy( 4493 
&describeParams.sMediaImage, 4494 sizeof(describeParams.sMediaImage))); 4495 4496 MediaImage2 &img = describeParams.sMediaImage; 4497 MediaImage2::PlaneInfo *plane = img.mPlane; 4498 ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }", 4499 mComponentName.c_str(), img.mWidth, img.mHeight, 4500 plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc, 4501 plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc, 4502 plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc); 4503 } 4504 } 4505 4506 int32_t width = (int32_t)videoDef->nFrameWidth; 4507 int32_t height = (int32_t)videoDef->nFrameHeight; 4508 4509 if (portIndex == kPortIndexOutput) { 4510 OMX_CONFIG_RECTTYPE rect; 4511 InitOMXParams(&rect); 4512 rect.nPortIndex = portIndex; 4513 4514 if (mOMX->getConfig( 4515 mNode, 4516 (portIndex == kPortIndexOutput ? 4517 OMX_IndexConfigCommonOutputCrop : 4518 OMX_IndexConfigCommonInputCrop), 4519 &rect, sizeof(rect)) != OK) { 4520 rect.nLeft = 0; 4521 rect.nTop = 0; 4522 rect.nWidth = videoDef->nFrameWidth; 4523 rect.nHeight = videoDef->nFrameHeight; 4524 } 4525 4526 if (rect.nLeft < 0 || 4527 rect.nTop < 0 || 4528 rect.nLeft + rect.nWidth > videoDef->nFrameWidth || 4529 rect.nTop + rect.nHeight > videoDef->nFrameHeight) { 4530 ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)", 4531 rect.nLeft, rect.nTop, 4532 rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight, 4533 videoDef->nFrameWidth, videoDef->nFrameHeight); 4534 return BAD_VALUE; 4535 } 4536 4537 notify->setRect( 4538 "crop", 4539 rect.nLeft, 4540 rect.nTop, 4541 rect.nLeft + rect.nWidth - 1, 4542 rect.nTop + rect.nHeight - 1); 4543 4544 width = rect.nWidth; 4545 height = rect.nHeight; 4546 4547 android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN; 4548 (void)getColorAspectsAndDataSpaceForVideoDecoder( 4549 width, height, mConfigFormat, notify, 4550 mUsingNativeWindow ? 
&dataSpace : NULL); 4551 if (mUsingNativeWindow) { 4552 notify->setInt32("android._dataspace", dataSpace); 4553 } 4554 } else { 4555 (void)getInputColorAspectsForVideoEncoder(notify); 4556 } 4557 4558 break; 4559 } 4560 4561 case OMX_VIDEO_CodingVP8: 4562 case OMX_VIDEO_CodingVP9: 4563 { 4564 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4565 InitOMXParams(&vp8type); 4566 vp8type.nPortIndex = kPortIndexOutput; 4567 status_t err = mOMX->getParameter( 4568 mNode, 4569 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4570 &vp8type, 4571 sizeof(vp8type)); 4572 4573 if (err == OK) { 4574 AString tsSchema = "none"; 4575 if (vp8type.eTemporalPattern 4576 == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { 4577 switch (vp8type.nTemporalLayerCount) { 4578 case 1: 4579 { 4580 tsSchema = "webrtc.vp8.1-layer"; 4581 break; 4582 } 4583 case 2: 4584 { 4585 tsSchema = "webrtc.vp8.2-layer"; 4586 break; 4587 } 4588 case 3: 4589 { 4590 tsSchema = "webrtc.vp8.3-layer"; 4591 break; 4592 } 4593 default: 4594 { 4595 break; 4596 } 4597 } 4598 } 4599 notify->setString("ts-schema", tsSchema); 4600 } 4601 // Fall through to set up mime. 
4602 } 4603 4604 default: 4605 { 4606 if (mIsEncoder ^ (portIndex == kPortIndexOutput)) { 4607 // should be CodingUnused 4608 ALOGE("Raw port video compression format is %s(%d)", 4609 asString(videoDef->eCompressionFormat), 4610 videoDef->eCompressionFormat); 4611 return BAD_VALUE; 4612 } 4613 AString mime; 4614 if (GetMimeTypeForVideoCoding( 4615 videoDef->eCompressionFormat, &mime) != OK) { 4616 notify->setString("mime", "application/octet-stream"); 4617 } else { 4618 notify->setString("mime", mime.c_str()); 4619 } 4620 uint32_t intraRefreshPeriod = 0; 4621 if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK 4622 && intraRefreshPeriod > 0) { 4623 notify->setInt32("intra-refresh-period", intraRefreshPeriod); 4624 } 4625 break; 4626 } 4627 } 4628 notify->setInt32("width", videoDef->nFrameWidth); 4629 notify->setInt32("height", videoDef->nFrameHeight); 4630 ALOGV("[%s] %s format is %s", mComponentName.c_str(), 4631 portIndex == kPortIndexInput ? "input" : "output", 4632 notify->debugString().c_str()); 4633 4634 break; 4635 } 4636 4637 case OMX_PortDomainAudio: 4638 { 4639 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 4640 4641 switch ((int)audioDef->eEncoding) { 4642 case OMX_AUDIO_CodingPCM: 4643 { 4644 OMX_AUDIO_PARAM_PCMMODETYPE params; 4645 InitOMXParams(¶ms); 4646 params.nPortIndex = portIndex; 4647 4648 err = mOMX->getParameter( 4649 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4650 if (err != OK) { 4651 return err; 4652 } 4653 4654 if (params.nChannels <= 0 4655 || (params.nChannels != 1 && !params.bInterleaved) 4656 || params.nBitPerSample != 16u 4657 || params.eNumData != OMX_NumericalDataSigned 4658 || params.ePCMMode != OMX_AUDIO_PCMModeLinear) { 4659 ALOGE("unsupported PCM port: %u channels%s, %u-bit, %s(%d), %s(%d) mode ", 4660 params.nChannels, 4661 params.bInterleaved ? 
" interleaved" : "", 4662 params.nBitPerSample, 4663 asString(params.eNumData), params.eNumData, 4664 asString(params.ePCMMode), params.ePCMMode); 4665 return FAILED_TRANSACTION; 4666 } 4667 4668 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 4669 notify->setInt32("channel-count", params.nChannels); 4670 notify->setInt32("sample-rate", params.nSamplingRate); 4671 4672 if (mChannelMaskPresent) { 4673 notify->setInt32("channel-mask", mChannelMask); 4674 } 4675 break; 4676 } 4677 4678 case OMX_AUDIO_CodingAAC: 4679 { 4680 OMX_AUDIO_PARAM_AACPROFILETYPE params; 4681 InitOMXParams(¶ms); 4682 params.nPortIndex = portIndex; 4683 4684 err = mOMX->getParameter( 4685 mNode, OMX_IndexParamAudioAac, ¶ms, sizeof(params)); 4686 if (err != OK) { 4687 return err; 4688 } 4689 4690 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 4691 notify->setInt32("channel-count", params.nChannels); 4692 notify->setInt32("sample-rate", params.nSampleRate); 4693 break; 4694 } 4695 4696 case OMX_AUDIO_CodingAMR: 4697 { 4698 OMX_AUDIO_PARAM_AMRTYPE params; 4699 InitOMXParams(¶ms); 4700 params.nPortIndex = portIndex; 4701 4702 err = mOMX->getParameter( 4703 mNode, OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); 4704 if (err != OK) { 4705 return err; 4706 } 4707 4708 notify->setInt32("channel-count", 1); 4709 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 4710 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 4711 notify->setInt32("sample-rate", 16000); 4712 } else { 4713 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 4714 notify->setInt32("sample-rate", 8000); 4715 } 4716 break; 4717 } 4718 4719 case OMX_AUDIO_CodingFLAC: 4720 { 4721 OMX_AUDIO_PARAM_FLACTYPE params; 4722 InitOMXParams(¶ms); 4723 params.nPortIndex = portIndex; 4724 4725 err = mOMX->getParameter( 4726 mNode, OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); 4727 if (err != OK) { 4728 return err; 4729 } 4730 4731 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 4732 notify->setInt32("channel-count", 
params.nChannels); 4733 notify->setInt32("sample-rate", params.nSampleRate); 4734 break; 4735 } 4736 4737 case OMX_AUDIO_CodingMP3: 4738 { 4739 OMX_AUDIO_PARAM_MP3TYPE params; 4740 InitOMXParams(¶ms); 4741 params.nPortIndex = portIndex; 4742 4743 err = mOMX->getParameter( 4744 mNode, OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); 4745 if (err != OK) { 4746 return err; 4747 } 4748 4749 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 4750 notify->setInt32("channel-count", params.nChannels); 4751 notify->setInt32("sample-rate", params.nSampleRate); 4752 break; 4753 } 4754 4755 case OMX_AUDIO_CodingVORBIS: 4756 { 4757 OMX_AUDIO_PARAM_VORBISTYPE params; 4758 InitOMXParams(¶ms); 4759 params.nPortIndex = portIndex; 4760 4761 err = mOMX->getParameter( 4762 mNode, OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); 4763 if (err != OK) { 4764 return err; 4765 } 4766 4767 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 4768 notify->setInt32("channel-count", params.nChannels); 4769 notify->setInt32("sample-rate", params.nSampleRate); 4770 break; 4771 } 4772 4773 case OMX_AUDIO_CodingAndroidAC3: 4774 { 4775 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 4776 InitOMXParams(¶ms); 4777 params.nPortIndex = portIndex; 4778 4779 err = mOMX->getParameter( 4780 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 4781 ¶ms, sizeof(params)); 4782 if (err != OK) { 4783 return err; 4784 } 4785 4786 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3); 4787 notify->setInt32("channel-count", params.nChannels); 4788 notify->setInt32("sample-rate", params.nSampleRate); 4789 break; 4790 } 4791 4792 case OMX_AUDIO_CodingAndroidEAC3: 4793 { 4794 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 4795 InitOMXParams(¶ms); 4796 params.nPortIndex = portIndex; 4797 4798 err = mOMX->getParameter( 4799 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 4800 ¶ms, sizeof(params)); 4801 if (err != OK) { 4802 return err; 4803 } 4804 4805 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 4806 
notify->setInt32("channel-count", params.nChannels); 4807 notify->setInt32("sample-rate", params.nSampleRate); 4808 break; 4809 } 4810 4811 case OMX_AUDIO_CodingAndroidOPUS: 4812 { 4813 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 4814 InitOMXParams(¶ms); 4815 params.nPortIndex = portIndex; 4816 4817 err = mOMX->getParameter( 4818 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 4819 ¶ms, sizeof(params)); 4820 if (err != OK) { 4821 return err; 4822 } 4823 4824 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 4825 notify->setInt32("channel-count", params.nChannels); 4826 notify->setInt32("sample-rate", params.nSampleRate); 4827 break; 4828 } 4829 4830 case OMX_AUDIO_CodingG711: 4831 { 4832 OMX_AUDIO_PARAM_PCMMODETYPE params; 4833 InitOMXParams(¶ms); 4834 params.nPortIndex = portIndex; 4835 4836 err = mOMX->getParameter( 4837 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4838 if (err != OK) { 4839 return err; 4840 } 4841 4842 const char *mime = NULL; 4843 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 4844 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 4845 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 4846 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 4847 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 4848 mime = MEDIA_MIMETYPE_AUDIO_RAW; 4849 } 4850 notify->setString("mime", mime); 4851 notify->setInt32("channel-count", params.nChannels); 4852 notify->setInt32("sample-rate", params.nSamplingRate); 4853 break; 4854 } 4855 4856 case OMX_AUDIO_CodingGSMFR: 4857 { 4858 OMX_AUDIO_PARAM_PCMMODETYPE params; 4859 InitOMXParams(¶ms); 4860 params.nPortIndex = portIndex; 4861 4862 err = mOMX->getParameter( 4863 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4864 if (err != OK) { 4865 return err; 4866 } 4867 4868 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 4869 notify->setInt32("channel-count", params.nChannels); 4870 notify->setInt32("sample-rate", params.nSamplingRate); 4871 break; 4872 } 4873 4874 default: 4875 
ALOGE("Unsupported audio coding: %s(%d)\n", 4876 asString(audioDef->eEncoding), audioDef->eEncoding); 4877 return BAD_TYPE; 4878 } 4879 break; 4880 } 4881 4882 default: 4883 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 4884 return BAD_TYPE; 4885 } 4886 4887 return OK; 4888} 4889 4890void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 4891 // aspects are normally communicated in ColorAspects 4892 int32_t range, standard, transfer; 4893 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 4894 4895 // if some aspects are unspecified, use dataspace fields 4896 if (range != 0) { 4897 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 4898 } 4899 if (standard != 0) { 4900 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 4901 } 4902 if (transfer != 0) { 4903 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 4904 } 4905 4906 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 4907 if (range != 0) { 4908 mOutputFormat->setInt32("color-range", range); 4909 } 4910 if (standard != 0) { 4911 mOutputFormat->setInt32("color-standard", standard); 4912 } 4913 if (transfer != 0) { 4914 mOutputFormat->setInt32("color-transfer", transfer); 4915 } 4916 4917 ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 4918 "(R:%d(%s), S:%d(%s), T:%d(%s))", 4919 dataSpace, 4920 aspects.mRange, asString(aspects.mRange), 4921 aspects.mPrimaries, asString(aspects.mPrimaries), 4922 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 4923 aspects.mTransfer, asString(aspects.mTransfer), 4924 range, asString((ColorRange)range), 4925 standard, asString((ColorStandard)standard), 4926 transfer, asString((ColorTransfer)transfer)); 4927} 4928 4929void ACodec::onOutputFormatChanged() { 4930 // store new output format 4931 mOutputFormat = 
mBaseOutputFormat->dup(); 4932 4933 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 4934 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 4935 return; 4936 } 4937 4938 if (mTunneled) { 4939 sendFormatChange(); 4940 } 4941} 4942 4943void ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> ¬ify) { 4944 AString mime; 4945 CHECK(mOutputFormat->findString("mime", &mime)); 4946 4947 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) { 4948 // notify renderer of the crop change and dataspace change 4949 // NOTE: native window uses extended right-bottom coordinate 4950 int32_t left, top, right, bottom; 4951 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) { 4952 notify->setRect("crop", left, top, right + 1, bottom + 1); 4953 } 4954 4955 int32_t dataSpace; 4956 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) { 4957 notify->setInt32("dataspace", dataSpace); 4958 } 4959 } 4960} 4961 4962void ACodec::sendFormatChange() { 4963 AString mime; 4964 CHECK(mOutputFormat->findString("mime", &mime)); 4965 4966 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 4967 int32_t channelCount; 4968 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 4969 if (mSkipCutBuffer != NULL) { 4970 size_t prevbufsize = mSkipCutBuffer->size(); 4971 if (prevbufsize != 0) { 4972 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 4973 } 4974 } 4975 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 4976 } 4977 4978 sp<AMessage> notify = mNotify->dup(); 4979 notify->setInt32("what", kWhatOutputFormatChanged); 4980 notify->setMessage("format", mOutputFormat); 4981 notify->post(); 4982 4983 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 4984 mLastOutputFormat = mOutputFormat; 4985} 4986 4987void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 4988 sp<AMessage> 
notify = mNotify->dup(); 4989 notify->setInt32("what", CodecBase::kWhatError); 4990 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 4991 4992 if (internalError == UNKNOWN_ERROR) { // find better error code 4993 const status_t omxStatus = statusFromOMXError(error); 4994 if (omxStatus != 0) { 4995 internalError = omxStatus; 4996 } else { 4997 ALOGW("Invalid OMX error %#x", error); 4998 } 4999 } 5000 5001 mFatalError = true; 5002 5003 notify->setInt32("err", internalError); 5004 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 5005 notify->post(); 5006} 5007 5008//////////////////////////////////////////////////////////////////////////////// 5009 5010ACodec::PortDescription::PortDescription() { 5011} 5012 5013status_t ACodec::requestIDRFrame() { 5014 if (!mIsEncoder) { 5015 return ERROR_UNSUPPORTED; 5016 } 5017 5018 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5019 InitOMXParams(¶ms); 5020 5021 params.nPortIndex = kPortIndexOutput; 5022 params.IntraRefreshVOP = OMX_TRUE; 5023 5024 return mOMX->setConfig( 5025 mNode, 5026 OMX_IndexConfigVideoIntraVOPRefresh, 5027 ¶ms, 5028 sizeof(params)); 5029} 5030 5031void ACodec::PortDescription::addBuffer( 5032 IOMX::buffer_id id, const sp<ABuffer> &buffer, 5033 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) { 5034 mBufferIDs.push_back(id); 5035 mBuffers.push_back(buffer); 5036 mHandles.push_back(handle); 5037 mMemRefs.push_back(memRef); 5038} 5039 5040size_t ACodec::PortDescription::countBuffers() { 5041 return mBufferIDs.size(); 5042} 5043 5044IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5045 return mBufferIDs.itemAt(index); 5046} 5047 5048sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5049 return mBuffers.itemAt(index); 5050} 5051 5052sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const { 5053 return mHandles.itemAt(index); 5054} 5055 5056sp<RefBase> ACodec::PortDescription::memRefAt(size_t 
index) const { 5057 return mMemRefs.itemAt(index); 5058} 5059 5060//////////////////////////////////////////////////////////////////////////////// 5061 5062ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5063 : AState(parentState), 5064 mCodec(codec) { 5065} 5066 5067ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5068 OMX_U32 /* portIndex */) { 5069 return KEEP_BUFFERS; 5070} 5071 5072bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5073 switch (msg->what()) { 5074 case kWhatInputBufferFilled: 5075 { 5076 onInputBufferFilled(msg); 5077 break; 5078 } 5079 5080 case kWhatOutputBufferDrained: 5081 { 5082 onOutputBufferDrained(msg); 5083 break; 5084 } 5085 5086 case ACodec::kWhatOMXMessageList: 5087 { 5088 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5089 } 5090 5091 case ACodec::kWhatOMXMessageItem: 5092 { 5093 // no need to check as we already did it for kWhatOMXMessageList 5094 return onOMXMessage(msg); 5095 } 5096 5097 case ACodec::kWhatOMXMessage: 5098 { 5099 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5100 } 5101 5102 case ACodec::kWhatSetSurface: 5103 { 5104 sp<AReplyToken> replyID; 5105 CHECK(msg->senderAwaitsResponse(&replyID)); 5106 5107 sp<RefBase> obj; 5108 CHECK(msg->findObject("surface", &obj)); 5109 5110 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5111 5112 sp<AMessage> response = new AMessage; 5113 response->setInt32("err", err); 5114 response->postReply(replyID); 5115 break; 5116 } 5117 5118 case ACodec::kWhatCreateInputSurface: 5119 case ACodec::kWhatSetInputSurface: 5120 case ACodec::kWhatSignalEndOfInputStream: 5121 { 5122 // This may result in an app illegal state exception. 
5123 ALOGE("Message 0x%x was not handled", msg->what()); 5124 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5125 return true; 5126 } 5127 5128 case ACodec::kWhatOMXDied: 5129 { 5130 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5131 ALOGE("OMX/mediaserver died, signalling error!"); 5132 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5133 break; 5134 } 5135 5136 case ACodec::kWhatReleaseCodecInstance: 5137 { 5138 ALOGI("[%s] forcing the release of codec", 5139 mCodec->mComponentName.c_str()); 5140 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5141 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5142 mCodec->mComponentName.c_str(), err); 5143 sp<AMessage> notify = mCodec->mNotify->dup(); 5144 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5145 notify->post(); 5146 break; 5147 } 5148 5149 default: 5150 return false; 5151 } 5152 5153 return true; 5154} 5155 5156bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5157 // there is a possibility that this is an outstanding message for a 5158 // codec that we have already destroyed 5159 if (mCodec->mNode == 0) { 5160 ALOGI("ignoring message as already freed component: %s", 5161 msg->debugString().c_str()); 5162 return false; 5163 } 5164 5165 IOMX::node_id nodeID; 5166 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5167 if (nodeID != mCodec->mNode) { 5168 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5169 return false; 5170 } 5171 return true; 5172} 5173 5174bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5175 sp<RefBase> obj; 5176 CHECK(msg->findObject("messages", &obj)); 5177 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5178 5179 bool receivedRenderedEvents = false; 5180 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5181 it != msgList->getList().cend(); ++it) { 5182 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 
5183 mCodec->handleMessage(*it); 5184 int32_t type; 5185 CHECK((*it)->findInt32("type", &type)); 5186 if (type == omx_message::FRAME_RENDERED) { 5187 receivedRenderedEvents = true; 5188 } 5189 } 5190 5191 if (receivedRenderedEvents) { 5192 // NOTE: all buffers are rendered in this case 5193 mCodec->notifyOfRenderedFrames(); 5194 } 5195 return true; 5196} 5197 5198bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5199 int32_t type; 5200 CHECK(msg->findInt32("type", &type)); 5201 5202 switch (type) { 5203 case omx_message::EVENT: 5204 { 5205 int32_t event, data1, data2; 5206 CHECK(msg->findInt32("event", &event)); 5207 CHECK(msg->findInt32("data1", &data1)); 5208 CHECK(msg->findInt32("data2", &data2)); 5209 5210 if (event == OMX_EventCmdComplete 5211 && data1 == OMX_CommandFlush 5212 && data2 == (int32_t)OMX_ALL) { 5213 // Use of this notification is not consistent across 5214 // implementations. We'll drop this notification and rely 5215 // on flush-complete notifications on the individual port 5216 // indices instead. 
5217 5218 return true; 5219 } 5220 5221 return onOMXEvent( 5222 static_cast<OMX_EVENTTYPE>(event), 5223 static_cast<OMX_U32>(data1), 5224 static_cast<OMX_U32>(data2)); 5225 } 5226 5227 case omx_message::EMPTY_BUFFER_DONE: 5228 { 5229 IOMX::buffer_id bufferID; 5230 int32_t fenceFd; 5231 5232 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5233 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5234 5235 return onOMXEmptyBufferDone(bufferID, fenceFd); 5236 } 5237 5238 case omx_message::FILL_BUFFER_DONE: 5239 { 5240 IOMX::buffer_id bufferID; 5241 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5242 5243 int32_t rangeOffset, rangeLength, flags, fenceFd; 5244 int64_t timeUs; 5245 5246 CHECK(msg->findInt32("range_offset", &rangeOffset)); 5247 CHECK(msg->findInt32("range_length", &rangeLength)); 5248 CHECK(msg->findInt32("flags", &flags)); 5249 CHECK(msg->findInt64("timestamp", &timeUs)); 5250 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5251 5252 return onOMXFillBufferDone( 5253 bufferID, 5254 (size_t)rangeOffset, (size_t)rangeLength, 5255 (OMX_U32)flags, 5256 timeUs, 5257 fenceFd); 5258 } 5259 5260 case omx_message::FRAME_RENDERED: 5261 { 5262 int64_t mediaTimeUs, systemNano; 5263 5264 CHECK(msg->findInt64("media_time_us", &mediaTimeUs)); 5265 CHECK(msg->findInt64("system_nano", &systemNano)); 5266 5267 return onOMXFrameRendered( 5268 mediaTimeUs, systemNano); 5269 } 5270 5271 default: 5272 ALOGE("Unexpected message type: %d", type); 5273 return false; 5274 } 5275} 5276 5277bool ACodec::BaseState::onOMXFrameRendered( 5278 int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) { 5279 // ignore outside of Executing and PortSettingsChanged states 5280 return true; 5281} 5282 5283bool ACodec::BaseState::onOMXEvent( 5284 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5285 if (event == OMX_EventDataSpaceChanged) { 5286 ColorAspects aspects; 5287 aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF); 5288 aspects.mPrimaries = 
(ColorAspects::Primaries)((data2 >> 16) & 0xFF); 5289 aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF); 5290 aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF); 5291 5292 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects); 5293 return true; 5294 } 5295 5296 if (event != OMX_EventError) { 5297 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)", 5298 mCodec->mComponentName.c_str(), event, data1, data2); 5299 5300 return false; 5301 } 5302 5303 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1); 5304 5305 // verify OMX component sends back an error we expect. 5306 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 5307 if (!isOMXError(omxError)) { 5308 ALOGW("Invalid OMX error %#x", omxError); 5309 omxError = OMX_ErrorUndefined; 5310 } 5311 mCodec->signalError(omxError); 5312 5313 return true; 5314} 5315 5316bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { 5317 ALOGV("[%s] onOMXEmptyBufferDone %u", 5318 mCodec->mComponentName.c_str(), bufferID); 5319 5320 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5321 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5322 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5323 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5324 mCodec->dumpBuffers(kPortIndexInput); 5325 if (fenceFd >= 0) { 5326 ::close(fenceFd); 5327 } 5328 return false; 5329 } 5330 info->mStatus = BufferInfo::OWNED_BY_US; 5331 5332 // input buffers cannot take fences, so wait for any fence now 5333 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); 5334 fenceFd = -1; 5335 5336 // still save fence for completeness 5337 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); 5338 5339 // We're in "store-metadata-in-buffers" mode, the underlying 5340 // OMX component had access to data that's implicitly refcounted 5341 // by this "MediaBuffer" object. 
Now that the OMX component has 5342 // told us that it's done with the input buffer, we can decrement 5343 // the mediaBuffer's reference count. 5344 info->mData->setMediaBufferBase(NULL); 5345 5346 PortMode mode = getPortMode(kPortIndexInput); 5347 5348 switch (mode) { 5349 case KEEP_BUFFERS: 5350 break; 5351 5352 case RESUBMIT_BUFFERS: 5353 postFillThisBuffer(info); 5354 break; 5355 5356 case FREE_BUFFERS: 5357 default: 5358 ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers"); 5359 return false; 5360 } 5361 5362 return true; 5363} 5364 5365void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 5366 if (mCodec->mPortEOS[kPortIndexInput]) { 5367 return; 5368 } 5369 5370 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 5371 5372 sp<AMessage> notify = mCodec->mNotify->dup(); 5373 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 5374 notify->setInt32("buffer-id", info->mBufferID); 5375 5376 info->mData->meta()->clear(); 5377 notify->setBuffer("buffer", info->mData); 5378 5379 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); 5380 reply->setInt32("buffer-id", info->mBufferID); 5381 5382 notify->setMessage("reply", reply); 5383 5384 notify->post(); 5385 5386 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 5387} 5388 5389void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 5390 IOMX::buffer_id bufferID; 5391 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5392 sp<ABuffer> buffer; 5393 int32_t err = OK; 5394 bool eos = false; 5395 PortMode mode = getPortMode(kPortIndexInput); 5396 5397 if (!msg->findBuffer("buffer", &buffer)) { 5398 /* these are unfilled buffers returned by client */ 5399 CHECK(msg->findInt32("err", &err)); 5400 5401 if (err == OK) { 5402 /* buffers with no errors are returned on MediaCodec.flush */ 5403 mode = KEEP_BUFFERS; 5404 } else { 5405 ALOGV("[%s] saw error %d instead of an input buffer", 5406 mCodec->mComponentName.c_str(), err); 5407 eos = true; 5408 } 5409 5410 
buffer.clear(); 5411 } 5412 5413 int32_t tmp; 5414 if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { 5415 eos = true; 5416 err = ERROR_END_OF_STREAM; 5417 } 5418 5419 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5420 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5421 if (status != BufferInfo::OWNED_BY_UPSTREAM) { 5422 ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID); 5423 mCodec->dumpBuffers(kPortIndexInput); 5424 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5425 return; 5426 } 5427 5428 info->mStatus = BufferInfo::OWNED_BY_US; 5429 5430 switch (mode) { 5431 case KEEP_BUFFERS: 5432 { 5433 if (eos) { 5434 if (!mCodec->mPortEOS[kPortIndexInput]) { 5435 mCodec->mPortEOS[kPortIndexInput] = true; 5436 mCodec->mInputEOSResult = err; 5437 } 5438 } 5439 break; 5440 } 5441 5442 case RESUBMIT_BUFFERS: 5443 { 5444 if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) { 5445 // Do not send empty input buffer w/o EOS to the component. 5446 if (buffer->size() == 0 && !eos) { 5447 postFillThisBuffer(info); 5448 break; 5449 } 5450 5451 int64_t timeUs; 5452 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 5453 5454 OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; 5455 5456 int32_t isCSD; 5457 if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) { 5458 flags |= OMX_BUFFERFLAG_CODECCONFIG; 5459 } 5460 5461 if (eos) { 5462 flags |= OMX_BUFFERFLAG_EOS; 5463 } 5464 5465 if (buffer != info->mData) { 5466 ALOGV("[%s] Needs to copy input data for buffer %u. 
(%p != %p)", 5467 mCodec->mComponentName.c_str(), 5468 bufferID, 5469 buffer.get(), info->mData.get()); 5470 5471 if (buffer->size() > info->mData->capacity()) { 5472 ALOGE("data size (%zu) is greated than buffer capacity (%zu)", 5473 buffer->size(), // this is the data received 5474 info->mData->capacity()); // this is out buffer size 5475 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5476 return; 5477 } 5478 memcpy(info->mData->data(), buffer->data(), buffer->size()); 5479 } 5480 5481 if (flags & OMX_BUFFERFLAG_CODECCONFIG) { 5482 ALOGV("[%s] calling emptyBuffer %u w/ codec specific data", 5483 mCodec->mComponentName.c_str(), bufferID); 5484 } else if (flags & OMX_BUFFERFLAG_EOS) { 5485 ALOGV("[%s] calling emptyBuffer %u w/ EOS", 5486 mCodec->mComponentName.c_str(), bufferID); 5487 } else { 5488#if TRACK_BUFFER_TIMING 5489 ALOGI("[%s] calling emptyBuffer %u w/ time %lld us", 5490 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5491#else 5492 ALOGV("[%s] calling emptyBuffer %u w/ time %lld us", 5493 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5494#endif 5495 } 5496 5497#if TRACK_BUFFER_TIMING 5498 ACodec::BufferStats stats; 5499 stats.mEmptyBufferTimeUs = ALooper::GetNowUs(); 5500 stats.mFillBufferDoneTimeUs = -1ll; 5501 mCodec->mBufferStats.add(timeUs, stats); 5502#endif 5503 5504 if (mCodec->storingMetadataInDecodedBuffers()) { 5505 // try to submit an output buffer for each input buffer 5506 PortMode outputMode = getPortMode(kPortIndexOutput); 5507 5508 ALOGV("MetadataBuffersToSubmit=%u portMode=%s", 5509 mCodec->mMetadataBuffersToSubmit, 5510 (outputMode == FREE_BUFFERS ? "FREE" : 5511 outputMode == KEEP_BUFFERS ? 
"KEEP" : "RESUBMIT")); 5512 if (outputMode == RESUBMIT_BUFFERS) { 5513 mCodec->submitOutputMetadataBuffer(); 5514 } 5515 } 5516 info->checkReadFence("onInputBufferFilled"); 5517 status_t err2 = mCodec->mOMX->emptyBuffer( 5518 mCodec->mNode, 5519 bufferID, 5520 0, 5521 buffer->size(), 5522 flags, 5523 timeUs, 5524 info->mFenceFd); 5525 info->mFenceFd = -1; 5526 if (err2 != OK) { 5527 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5528 return; 5529 } 5530 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5531 5532 if (!eos && err == OK) { 5533 getMoreInputDataIfPossible(); 5534 } else { 5535 ALOGV("[%s] Signalled EOS (%d) on the input port", 5536 mCodec->mComponentName.c_str(), err); 5537 5538 mCodec->mPortEOS[kPortIndexInput] = true; 5539 mCodec->mInputEOSResult = err; 5540 } 5541 } else if (!mCodec->mPortEOS[kPortIndexInput]) { 5542 if (err != OK && err != ERROR_END_OF_STREAM) { 5543 ALOGV("[%s] Signalling EOS on the input port due to error %d", 5544 mCodec->mComponentName.c_str(), err); 5545 } else { 5546 ALOGV("[%s] Signalling EOS on the input port", 5547 mCodec->mComponentName.c_str()); 5548 } 5549 5550 ALOGV("[%s] calling emptyBuffer %u signalling EOS", 5551 mCodec->mComponentName.c_str(), bufferID); 5552 5553 info->checkReadFence("onInputBufferFilled"); 5554 status_t err2 = mCodec->mOMX->emptyBuffer( 5555 mCodec->mNode, 5556 bufferID, 5557 0, 5558 0, 5559 OMX_BUFFERFLAG_EOS, 5560 0, 5561 info->mFenceFd); 5562 info->mFenceFd = -1; 5563 if (err2 != OK) { 5564 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5565 return; 5566 } 5567 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5568 5569 mCodec->mPortEOS[kPortIndexInput] = true; 5570 mCodec->mInputEOSResult = err; 5571 } 5572 break; 5573 } 5574 5575 case FREE_BUFFERS: 5576 break; 5577 5578 default: 5579 ALOGE("invalid port mode: %d", mode); 5580 break; 5581 } 5582} 5583 5584void ACodec::BaseState::getMoreInputDataIfPossible() { 5585 if 
(mCodec->mPortEOS[kPortIndexInput]) { 5586 return; 5587 } 5588 5589 BufferInfo *eligible = NULL; 5590 5591 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 5592 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 5593 5594#if 0 5595 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { 5596 // There's already a "read" pending. 5597 return; 5598 } 5599#endif 5600 5601 if (info->mStatus == BufferInfo::OWNED_BY_US) { 5602 eligible = info; 5603 } 5604 } 5605 5606 if (eligible == NULL) { 5607 return; 5608 } 5609 5610 postFillThisBuffer(eligible); 5611} 5612 5613bool ACodec::BaseState::onOMXFillBufferDone( 5614 IOMX::buffer_id bufferID, 5615 size_t rangeOffset, size_t rangeLength, 5616 OMX_U32 flags, 5617 int64_t timeUs, 5618 int fenceFd) { 5619 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x", 5620 mCodec->mComponentName.c_str(), bufferID, timeUs, flags); 5621 5622 ssize_t index; 5623 status_t err= OK; 5624 5625#if TRACK_BUFFER_TIMING 5626 index = mCodec->mBufferStats.indexOfKey(timeUs); 5627 if (index >= 0) { 5628 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index); 5629 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs(); 5630 5631 ALOGI("frame PTS %lld: %lld", 5632 timeUs, 5633 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs); 5634 5635 mCodec->mBufferStats.removeItemsAt(index); 5636 stats = NULL; 5637 } 5638#endif 5639 5640 BufferInfo *info = 5641 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5642 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5643 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5644 ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5645 mCodec->dumpBuffers(kPortIndexOutput); 5646 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5647 if (fenceFd >= 0) { 5648 ::close(fenceFd); 5649 } 5650 return true; 5651 } 5652 5653 info->mDequeuedAt = ++mCodec->mDequeueCounter; 5654 info->mStatus = 
BufferInfo::OWNED_BY_US; 5655 5656 if (info->mRenderInfo != NULL) { 5657 // The fence for an emptied buffer must have signaled, but there still could be queued 5658 // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these, 5659 // as we will soon requeue this buffer to the surface. While in theory we could still keep 5660 // track of buffers that are requeued to the surface, it is better to add support to the 5661 // buffer-queue to notify us of released buffers and their fences (in the future). 5662 mCodec->notifyOfRenderedFrames(true /* dropIncomplete */); 5663 } 5664 5665 // byte buffers cannot take fences, so wait for any fence now 5666 if (mCodec->mNativeWindow == NULL) { 5667 (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone"); 5668 fenceFd = -1; 5669 } 5670 info->setReadFence(fenceFd, "onOMXFillBufferDone"); 5671 5672 PortMode mode = getPortMode(kPortIndexOutput); 5673 5674 switch (mode) { 5675 case KEEP_BUFFERS: 5676 break; 5677 5678 case RESUBMIT_BUFFERS: 5679 { 5680 if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS) 5681 || mCodec->mPortEOS[kPortIndexOutput])) { 5682 ALOGV("[%s] calling fillBuffer %u", 5683 mCodec->mComponentName.c_str(), info->mBufferID); 5684 5685 err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 5686 info->mFenceFd = -1; 5687 if (err != OK) { 5688 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5689 return true; 5690 } 5691 5692 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5693 break; 5694 } 5695 5696 sp<AMessage> reply = 5697 new AMessage(kWhatOutputBufferDrained, mCodec); 5698 5699 if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) { 5700 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 5701 mCodec->sendFormatChange(); 5702 } 5703 5704 if (mCodec->usingMetadataOnEncoderOutput()) { 5705 native_handle_t *handle = NULL; 5706 VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata 
*)info->mData->data(); 5707 VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)info->mData->data(); 5708 if (info->mData->size() >= sizeof(grallocMeta) 5709 && grallocMeta.eType == kMetadataBufferTypeGrallocSource) { 5710 handle = (native_handle_t *)(uintptr_t)grallocMeta.pHandle; 5711 } else if (info->mData->size() >= sizeof(nativeMeta) 5712 && nativeMeta.eType == kMetadataBufferTypeANWBuffer) { 5713#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 5714 // ANativeWindowBuffer is only valid on 32-bit/mediaserver process 5715 handle = NULL; 5716#else 5717 handle = (native_handle_t *)nativeMeta.pBuffer->handle; 5718#endif 5719 } 5720 info->mData->meta()->setPointer("handle", handle); 5721 info->mData->meta()->setInt32("rangeOffset", rangeOffset); 5722 info->mData->meta()->setInt32("rangeLength", rangeLength); 5723 } else { 5724 info->mData->setRange(rangeOffset, rangeLength); 5725 } 5726#if 0 5727 if (mCodec->mNativeWindow == NULL) { 5728 if (IsIDR(info->mData)) { 5729 ALOGI("IDR frame"); 5730 } 5731 } 5732#endif 5733 5734 if (mCodec->mSkipCutBuffer != NULL) { 5735 mCodec->mSkipCutBuffer->submit(info->mData); 5736 } 5737 info->mData->meta()->setInt64("timeUs", timeUs); 5738 5739 sp<AMessage> notify = mCodec->mNotify->dup(); 5740 notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); 5741 notify->setInt32("buffer-id", info->mBufferID); 5742 notify->setBuffer("buffer", info->mData); 5743 notify->setInt32("flags", flags); 5744 5745 reply->setInt32("buffer-id", info->mBufferID); 5746 5747 notify->setMessage("reply", reply); 5748 5749 notify->post(); 5750 5751 info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM; 5752 5753 if (flags & OMX_BUFFERFLAG_EOS) { 5754 ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str()); 5755 5756 sp<AMessage> notify = mCodec->mNotify->dup(); 5757 notify->setInt32("what", CodecBase::kWhatEOS); 5758 notify->setInt32("err", mCodec->mInputEOSResult); 5759 notify->post(); 5760 5761 mCodec->mPortEOS[kPortIndexOutput] = true; 5762 } 
5763 break; 5764 } 5765 5766 case FREE_BUFFERS: 5767 err = mCodec->freeBuffer(kPortIndexOutput, index); 5768 if (err != OK) { 5769 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5770 return true; 5771 } 5772 break; 5773 5774 default: 5775 ALOGE("Invalid port mode: %d", mode); 5776 return false; 5777 } 5778 5779 return true; 5780} 5781 5782void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) { 5783 IOMX::buffer_id bufferID; 5784 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5785 ssize_t index; 5786 BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5787 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5788 if (status != BufferInfo::OWNED_BY_DOWNSTREAM) { 5789 ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5790 mCodec->dumpBuffers(kPortIndexOutput); 5791 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5792 return; 5793 } 5794 5795 android_native_rect_t crop; 5796 if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) { 5797 status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop); 5798 ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err); 5799 } 5800 5801 int32_t dataSpace; 5802 if (msg->findInt32("dataspace", &dataSpace)) { 5803 status_t err = native_window_set_buffers_data_space( 5804 mCodec->mNativeWindow.get(), (android_dataspace)dataSpace); 5805 ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err); 5806 } 5807 5808 int32_t render; 5809 if (mCodec->mNativeWindow != NULL 5810 && msg->findInt32("render", &render) && render != 0 5811 && info->mData != NULL && info->mData->size() != 0) { 5812 ATRACE_NAME("render"); 5813 // The client wants this buffer to be rendered. 
5814 5815 // save buffers sent to the surface so we can get render time when they return 5816 int64_t mediaTimeUs = -1; 5817 info->mData->meta()->findInt64("timeUs", &mediaTimeUs); 5818 if (mediaTimeUs >= 0) { 5819 mCodec->mRenderTracker.onFrameQueued( 5820 mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd))); 5821 } 5822 5823 int64_t timestampNs = 0; 5824 if (!msg->findInt64("timestampNs", ×tampNs)) { 5825 // use media timestamp if client did not request a specific render timestamp 5826 if (info->mData->meta()->findInt64("timeUs", ×tampNs)) { 5827 ALOGV("using buffer PTS of %lld", (long long)timestampNs); 5828 timestampNs *= 1000; 5829 } 5830 } 5831 5832 status_t err; 5833 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); 5834 ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err); 5835 5836 info->checkReadFence("onOutputBufferDrained before queueBuffer"); 5837 err = mCodec->mNativeWindow->queueBuffer( 5838 mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 5839 info->mFenceFd = -1; 5840 if (err == OK) { 5841 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 5842 } else { 5843 ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err); 5844 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5845 info->mStatus = BufferInfo::OWNED_BY_US; 5846 // keeping read fence as write fence to avoid clobbering 5847 info->mIsReadFence = false; 5848 } 5849 } else { 5850 if (mCodec->mNativeWindow != NULL && 5851 (info->mData == NULL || info->mData->size() != 0)) { 5852 // move read fence into write fence to avoid clobbering 5853 info->mIsReadFence = false; 5854 ATRACE_NAME("frame-drop"); 5855 } 5856 info->mStatus = BufferInfo::OWNED_BY_US; 5857 } 5858 5859 PortMode mode = getPortMode(kPortIndexOutput); 5860 5861 switch (mode) { 5862 case KEEP_BUFFERS: 5863 { 5864 // XXX fishy, revisit!!! What about the FREE_BUFFERS case below? 
5865 5866 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 5867 // We cannot resubmit the buffer we just rendered, dequeue 5868 // the spare instead. 5869 5870 info = mCodec->dequeueBufferFromNativeWindow(); 5871 } 5872 break; 5873 } 5874 5875 case RESUBMIT_BUFFERS: 5876 { 5877 if (!mCodec->mPortEOS[kPortIndexOutput]) { 5878 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 5879 // We cannot resubmit the buffer we just rendered, dequeue 5880 // the spare instead. 5881 5882 info = mCodec->dequeueBufferFromNativeWindow(); 5883 } 5884 5885 if (info != NULL) { 5886 ALOGV("[%s] calling fillBuffer %u", 5887 mCodec->mComponentName.c_str(), info->mBufferID); 5888 info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS"); 5889 status_t err = mCodec->mOMX->fillBuffer( 5890 mCodec->mNode, info->mBufferID, info->mFenceFd); 5891 info->mFenceFd = -1; 5892 if (err == OK) { 5893 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5894 } else { 5895 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5896 } 5897 } 5898 } 5899 break; 5900 } 5901 5902 case FREE_BUFFERS: 5903 { 5904 status_t err = mCodec->freeBuffer(kPortIndexOutput, index); 5905 if (err != OK) { 5906 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5907 } 5908 break; 5909 } 5910 5911 default: 5912 ALOGE("Invalid port mode: %d", mode); 5913 return; 5914 } 5915} 5916 5917//////////////////////////////////////////////////////////////////////////////// 5918 5919ACodec::UninitializedState::UninitializedState(ACodec *codec) 5920 : BaseState(codec) { 5921} 5922 5923void ACodec::UninitializedState::stateEntered() { 5924 ALOGV("Now uninitialized"); 5925 5926 if (mDeathNotifier != NULL) { 5927 mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier); 5928 mDeathNotifier.clear(); 5929 } 5930 5931 mCodec->mUsingNativeWindow = false; 5932 mCodec->mNativeWindow.clear(); 5933 mCodec->mNativeWindowUsageBits = 0; 5934 mCodec->mNode = 0; 5935 mCodec->mOMX.clear(); 5936 
mCodec->mQuirks = 0; 5937 mCodec->mFlags = 0; 5938 mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; 5939 mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; 5940 mCodec->mComponentName.clear(); 5941} 5942 5943bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 5944 bool handled = false; 5945 5946 switch (msg->what()) { 5947 case ACodec::kWhatSetup: 5948 { 5949 onSetup(msg); 5950 5951 handled = true; 5952 break; 5953 } 5954 5955 case ACodec::kWhatAllocateComponent: 5956 { 5957 onAllocateComponent(msg); 5958 handled = true; 5959 break; 5960 } 5961 5962 case ACodec::kWhatShutdown: 5963 { 5964 int32_t keepComponentAllocated; 5965 CHECK(msg->findInt32( 5966 "keepComponentAllocated", &keepComponentAllocated)); 5967 ALOGW_IF(keepComponentAllocated, 5968 "cannot keep component allocated on shutdown in Uninitialized state"); 5969 5970 sp<AMessage> notify = mCodec->mNotify->dup(); 5971 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5972 notify->post(); 5973 5974 handled = true; 5975 break; 5976 } 5977 5978 case ACodec::kWhatFlush: 5979 { 5980 sp<AMessage> notify = mCodec->mNotify->dup(); 5981 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 5982 notify->post(); 5983 5984 handled = true; 5985 break; 5986 } 5987 5988 case ACodec::kWhatReleaseCodecInstance: 5989 { 5990 // nothing to do, as we have already signaled shutdown 5991 handled = true; 5992 break; 5993 } 5994 5995 default: 5996 return BaseState::onMessageReceived(msg); 5997 } 5998 5999 return handled; 6000} 6001 6002void ACodec::UninitializedState::onSetup( 6003 const sp<AMessage> &msg) { 6004 if (onAllocateComponent(msg) 6005 && mCodec->mLoadedState->onConfigureComponent(msg)) { 6006 mCodec->mLoadedState->onStart(); 6007 } 6008} 6009 6010bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { 6011 ALOGV("onAllocateComponent"); 6012 6013 CHECK(mCodec->mNode == 0); 6014 6015 OMXClient client; 6016 if (client.connect() != OK) { 6017 
mCodec->signalError(OMX_ErrorUndefined, NO_INIT); 6018 return false; 6019 } 6020 6021 sp<IOMX> omx = client.interface(); 6022 6023 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec); 6024 6025 Vector<AString> matchingCodecs; 6026 6027 AString mime; 6028 6029 AString componentName; 6030 uint32_t quirks = 0; 6031 int32_t encoder = false; 6032 if (msg->findString("componentName", &componentName)) { 6033 sp<IMediaCodecList> list = MediaCodecList::getInstance(); 6034 if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) { 6035 matchingCodecs.add(componentName); 6036 } 6037 } else { 6038 CHECK(msg->findString("mime", &mime)); 6039 6040 if (!msg->findInt32("encoder", &encoder)) { 6041 encoder = false; 6042 } 6043 6044 MediaCodecList::findMatchingCodecs( 6045 mime.c_str(), 6046 encoder, // createEncoder 6047 0, // flags 6048 &matchingCodecs); 6049 } 6050 6051 sp<CodecObserver> observer = new CodecObserver; 6052 IOMX::node_id node = 0; 6053 6054 status_t err = NAME_NOT_FOUND; 6055 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size(); 6056 ++matchIndex) { 6057 componentName = matchingCodecs[matchIndex]; 6058 quirks = MediaCodecList::getQuirksFor(componentName.c_str()); 6059 6060 pid_t tid = gettid(); 6061 int prevPriority = androidGetThreadPriority(tid); 6062 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); 6063 err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node); 6064 androidSetThreadPriority(tid, prevPriority); 6065 6066 if (err == OK) { 6067 break; 6068 } else { 6069 ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str()); 6070 } 6071 6072 node = 0; 6073 } 6074 6075 if (node == 0) { 6076 if (!mime.empty()) { 6077 ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.", 6078 encoder ? 
"en" : "de", mime.c_str(), err); 6079 } else { 6080 ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err); 6081 } 6082 6083 mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err)); 6084 return false; 6085 } 6086 6087 mDeathNotifier = new DeathNotifier(notify); 6088 if (mCodec->mNodeBinder == NULL || 6089 mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) { 6090 // This was a local binder, if it dies so do we, we won't care 6091 // about any notifications in the afterlife. 6092 mDeathNotifier.clear(); 6093 } 6094 6095 notify = new AMessage(kWhatOMXMessageList, mCodec); 6096 observer->setNotificationMessage(notify); 6097 6098 mCodec->mComponentName = componentName; 6099 mCodec->mRenderTracker.setComponentName(componentName); 6100 mCodec->mFlags = 0; 6101 6102 if (componentName.endsWith(".secure")) { 6103 mCodec->mFlags |= kFlagIsSecure; 6104 mCodec->mFlags |= kFlagIsGrallocUsageProtected; 6105 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 6106 } 6107 6108 mCodec->mQuirks = quirks; 6109 mCodec->mOMX = omx; 6110 mCodec->mNode = node; 6111 6112 { 6113 sp<AMessage> notify = mCodec->mNotify->dup(); 6114 notify->setInt32("what", CodecBase::kWhatComponentAllocated); 6115 notify->setString("componentName", mCodec->mComponentName.c_str()); 6116 notify->post(); 6117 } 6118 6119 mCodec->changeState(mCodec->mLoadedState); 6120 6121 return true; 6122} 6123 6124//////////////////////////////////////////////////////////////////////////////// 6125 6126ACodec::LoadedState::LoadedState(ACodec *codec) 6127 : BaseState(codec) { 6128} 6129 6130void ACodec::LoadedState::stateEntered() { 6131 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); 6132 6133 mCodec->mPortEOS[kPortIndexInput] = 6134 mCodec->mPortEOS[kPortIndexOutput] = false; 6135 6136 mCodec->mInputEOSResult = OK; 6137 6138 mCodec->mDequeueCounter = 0; 6139 mCodec->mMetadataBuffersToSubmit = 0; 6140 mCodec->mRepeatFrameDelayUs = -1ll; 6141 
mCodec->mInputFormat.clear(); 6142 mCodec->mOutputFormat.clear(); 6143 mCodec->mBaseOutputFormat.clear(); 6144 6145 if (mCodec->mShutdownInProgress) { 6146 bool keepComponentAllocated = mCodec->mKeepComponentAllocated; 6147 6148 mCodec->mShutdownInProgress = false; 6149 mCodec->mKeepComponentAllocated = false; 6150 6151 onShutdown(keepComponentAllocated); 6152 } 6153 mCodec->mExplicitShutdown = false; 6154 6155 mCodec->processDeferredMessages(); 6156} 6157 6158void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { 6159 if (!keepComponentAllocated) { 6160 (void)mCodec->mOMX->freeNode(mCodec->mNode); 6161 6162 mCodec->changeState(mCodec->mUninitializedState); 6163 } 6164 6165 if (mCodec->mExplicitShutdown) { 6166 sp<AMessage> notify = mCodec->mNotify->dup(); 6167 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6168 notify->post(); 6169 mCodec->mExplicitShutdown = false; 6170 } 6171} 6172 6173bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { 6174 bool handled = false; 6175 6176 switch (msg->what()) { 6177 case ACodec::kWhatConfigureComponent: 6178 { 6179 onConfigureComponent(msg); 6180 handled = true; 6181 break; 6182 } 6183 6184 case ACodec::kWhatCreateInputSurface: 6185 { 6186 onCreateInputSurface(msg); 6187 handled = true; 6188 break; 6189 } 6190 6191 case ACodec::kWhatSetInputSurface: 6192 { 6193 onSetInputSurface(msg); 6194 handled = true; 6195 break; 6196 } 6197 6198 case ACodec::kWhatStart: 6199 { 6200 onStart(); 6201 handled = true; 6202 break; 6203 } 6204 6205 case ACodec::kWhatShutdown: 6206 { 6207 int32_t keepComponentAllocated; 6208 CHECK(msg->findInt32( 6209 "keepComponentAllocated", &keepComponentAllocated)); 6210 6211 mCodec->mExplicitShutdown = true; 6212 onShutdown(keepComponentAllocated); 6213 6214 handled = true; 6215 break; 6216 } 6217 6218 case ACodec::kWhatFlush: 6219 { 6220 sp<AMessage> notify = mCodec->mNotify->dup(); 6221 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6222 
notify->post(); 6223 6224 handled = true; 6225 break; 6226 } 6227 6228 default: 6229 return BaseState::onMessageReceived(msg); 6230 } 6231 6232 return handled; 6233} 6234 6235bool ACodec::LoadedState::onConfigureComponent( 6236 const sp<AMessage> &msg) { 6237 ALOGV("onConfigureComponent"); 6238 6239 CHECK(mCodec->mNode != 0); 6240 6241 status_t err = OK; 6242 AString mime; 6243 if (!msg->findString("mime", &mime)) { 6244 err = BAD_VALUE; 6245 } else { 6246 err = mCodec->configureCodec(mime.c_str(), msg); 6247 } 6248 if (err != OK) { 6249 ALOGE("[%s] configureCodec returning error %d", 6250 mCodec->mComponentName.c_str(), err); 6251 6252 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6253 return false; 6254 } 6255 6256 { 6257 sp<AMessage> notify = mCodec->mNotify->dup(); 6258 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 6259 notify->setMessage("input-format", mCodec->mInputFormat); 6260 notify->setMessage("output-format", mCodec->mOutputFormat); 6261 notify->post(); 6262 } 6263 6264 return true; 6265} 6266 6267status_t ACodec::LoadedState::setupInputSurface() { 6268 status_t err = OK; 6269 6270 if (mCodec->mRepeatFrameDelayUs > 0ll) { 6271 err = mCodec->mOMX->setInternalOption( 6272 mCodec->mNode, 6273 kPortIndexInput, 6274 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 6275 &mCodec->mRepeatFrameDelayUs, 6276 sizeof(mCodec->mRepeatFrameDelayUs)); 6277 6278 if (err != OK) { 6279 ALOGE("[%s] Unable to configure option to repeat previous " 6280 "frames (err %d)", 6281 mCodec->mComponentName.c_str(), 6282 err); 6283 return err; 6284 } 6285 } 6286 6287 if (mCodec->mMaxPtsGapUs > 0ll) { 6288 err = mCodec->mOMX->setInternalOption( 6289 mCodec->mNode, 6290 kPortIndexInput, 6291 IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 6292 &mCodec->mMaxPtsGapUs, 6293 sizeof(mCodec->mMaxPtsGapUs)); 6294 6295 if (err != OK) { 6296 ALOGE("[%s] Unable to configure max timestamp gap (err %d)", 6297 mCodec->mComponentName.c_str(), 6298 err); 6299 
return err; 6300 } 6301 } 6302 6303 if (mCodec->mMaxFps > 0) { 6304 err = mCodec->mOMX->setInternalOption( 6305 mCodec->mNode, 6306 kPortIndexInput, 6307 IOMX::INTERNAL_OPTION_MAX_FPS, 6308 &mCodec->mMaxFps, 6309 sizeof(mCodec->mMaxFps)); 6310 6311 if (err != OK) { 6312 ALOGE("[%s] Unable to configure max fps (err %d)", 6313 mCodec->mComponentName.c_str(), 6314 err); 6315 return err; 6316 } 6317 } 6318 6319 if (mCodec->mTimePerCaptureUs > 0ll 6320 && mCodec->mTimePerFrameUs > 0ll) { 6321 int64_t timeLapse[2]; 6322 timeLapse[0] = mCodec->mTimePerFrameUs; 6323 timeLapse[1] = mCodec->mTimePerCaptureUs; 6324 err = mCodec->mOMX->setInternalOption( 6325 mCodec->mNode, 6326 kPortIndexInput, 6327 IOMX::INTERNAL_OPTION_TIME_LAPSE, 6328 &timeLapse[0], 6329 sizeof(timeLapse)); 6330 6331 if (err != OK) { 6332 ALOGE("[%s] Unable to configure time lapse (err %d)", 6333 mCodec->mComponentName.c_str(), 6334 err); 6335 return err; 6336 } 6337 } 6338 6339 if (mCodec->mCreateInputBuffersSuspended) { 6340 bool suspend = true; 6341 err = mCodec->mOMX->setInternalOption( 6342 mCodec->mNode, 6343 kPortIndexInput, 6344 IOMX::INTERNAL_OPTION_SUSPEND, 6345 &suspend, 6346 sizeof(suspend)); 6347 6348 if (err != OK) { 6349 ALOGE("[%s] Unable to configure option to suspend (err %d)", 6350 mCodec->mComponentName.c_str(), 6351 err); 6352 return err; 6353 } 6354 } 6355 6356 uint32_t usageBits; 6357 if (mCodec->mOMX->getParameter( 6358 mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 6359 &usageBits, sizeof(usageBits)) == OK) { 6360 mCodec->mInputFormat->setInt32( 6361 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 6362 } 6363 6364 sp<ABuffer> colorAspectsBuffer; 6365 if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) { 6366 err = mCodec->mOMX->setInternalOption( 6367 mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS, 6368 colorAspectsBuffer->base(), colorAspectsBuffer->capacity()); 6369 if (err != OK) { 6370 
ALOGE("[%s] Unable to configure color aspects (err %d)", 6371 mCodec->mComponentName.c_str(), err); 6372 return err; 6373 } 6374 } 6375 return OK; 6376} 6377 6378void ACodec::LoadedState::onCreateInputSurface( 6379 const sp<AMessage> & /* msg */) { 6380 ALOGV("onCreateInputSurface"); 6381 6382 sp<AMessage> notify = mCodec->mNotify->dup(); 6383 notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); 6384 6385 android_dataspace dataSpace; 6386 status_t err = 6387 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6388 notify->setMessage("input-format", mCodec->mInputFormat); 6389 notify->setMessage("output-format", mCodec->mOutputFormat); 6390 6391 sp<IGraphicBufferProducer> bufferProducer; 6392 if (err == OK) { 6393 err = mCodec->mOMX->createInputSurface( 6394 mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, &mCodec->mInputMetadataType); 6395 } 6396 6397 if (err == OK) { 6398 err = setupInputSurface(); 6399 } 6400 6401 if (err == OK) { 6402 notify->setObject("input-surface", 6403 new BufferProducerWrapper(bufferProducer)); 6404 } else { 6405 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6406 // the error through because it's in the "configured" state. We 6407 // send a kWhatInputSurfaceCreated with an error value instead. 
6408 ALOGE("[%s] onCreateInputSurface returning error %d", 6409 mCodec->mComponentName.c_str(), err); 6410 notify->setInt32("err", err); 6411 } 6412 notify->post(); 6413} 6414 6415void ACodec::LoadedState::onSetInputSurface( 6416 const sp<AMessage> &msg) { 6417 ALOGV("onSetInputSurface"); 6418 6419 sp<AMessage> notify = mCodec->mNotify->dup(); 6420 notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted); 6421 6422 sp<RefBase> obj; 6423 CHECK(msg->findObject("input-surface", &obj)); 6424 sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get()); 6425 6426 android_dataspace dataSpace; 6427 status_t err = 6428 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6429 notify->setMessage("input-format", mCodec->mInputFormat); 6430 notify->setMessage("output-format", mCodec->mOutputFormat); 6431 6432 if (err == OK) { 6433 err = mCodec->mOMX->setInputSurface( 6434 mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(), 6435 &mCodec->mInputMetadataType); 6436 } 6437 6438 if (err == OK) { 6439 surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace); 6440 err = setupInputSurface(); 6441 } 6442 6443 if (err != OK) { 6444 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6445 // the error through because it's in the "configured" state. We 6446 // send a kWhatInputSurfaceAccepted with an error value instead. 
6447 ALOGE("[%s] onSetInputSurface returning error %d", 6448 mCodec->mComponentName.c_str(), err); 6449 notify->setInt32("err", err); 6450 } 6451 notify->post(); 6452} 6453 6454void ACodec::LoadedState::onStart() { 6455 ALOGV("onStart"); 6456 6457 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 6458 if (err != OK) { 6459 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6460 } else { 6461 mCodec->changeState(mCodec->mLoadedToIdleState); 6462 } 6463} 6464 6465//////////////////////////////////////////////////////////////////////////////// 6466 6467ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec) 6468 : BaseState(codec) { 6469} 6470 6471void ACodec::LoadedToIdleState::stateEntered() { 6472 ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str()); 6473 6474 status_t err; 6475 if ((err = allocateBuffers()) != OK) { 6476 ALOGE("Failed to allocate buffers after transitioning to IDLE state " 6477 "(error 0x%08x)", 6478 err); 6479 6480 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6481 6482 mCodec->mOMX->sendCommand( 6483 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 6484 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) { 6485 mCodec->freeBuffersOnPort(kPortIndexInput); 6486 } 6487 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) { 6488 mCodec->freeBuffersOnPort(kPortIndexOutput); 6489 } 6490 6491 mCodec->changeState(mCodec->mLoadedState); 6492 } 6493} 6494 6495status_t ACodec::LoadedToIdleState::allocateBuffers() { 6496 status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput); 6497 6498 if (err != OK) { 6499 return err; 6500 } 6501 6502 return mCodec->allocateBuffersOnPort(kPortIndexOutput); 6503} 6504 6505bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { 6506 switch (msg->what()) { 6507 case kWhatSetParameters: 6508 case kWhatShutdown: 6509 { 6510 mCodec->deferMessage(msg); 6511 return true; 6512 } 6513 6514 
case kWhatSignalEndOfInputStream: 6515 { 6516 mCodec->onSignalEndOfInputStream(); 6517 return true; 6518 } 6519 6520 case kWhatResume: 6521 { 6522 // We'll be active soon enough. 6523 return true; 6524 } 6525 6526 case kWhatFlush: 6527 { 6528 // We haven't even started yet, so we're flushed alright... 6529 sp<AMessage> notify = mCodec->mNotify->dup(); 6530 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6531 notify->post(); 6532 return true; 6533 } 6534 6535 default: 6536 return BaseState::onMessageReceived(msg); 6537 } 6538} 6539 6540bool ACodec::LoadedToIdleState::onOMXEvent( 6541 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6542 switch (event) { 6543 case OMX_EventCmdComplete: 6544 { 6545 status_t err = OK; 6546 if (data1 != (OMX_U32)OMX_CommandStateSet 6547 || data2 != (OMX_U32)OMX_StateIdle) { 6548 ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)", 6549 asString((OMX_COMMANDTYPE)data1), data1, 6550 asString((OMX_STATETYPE)data2), data2); 6551 err = FAILED_TRANSACTION; 6552 } 6553 6554 if (err == OK) { 6555 err = mCodec->mOMX->sendCommand( 6556 mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting); 6557 } 6558 6559 if (err != OK) { 6560 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6561 } else { 6562 mCodec->changeState(mCodec->mIdleToExecutingState); 6563 } 6564 6565 return true; 6566 } 6567 6568 default: 6569 return BaseState::onOMXEvent(event, data1, data2); 6570 } 6571} 6572 6573//////////////////////////////////////////////////////////////////////////////// 6574 6575ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec) 6576 : BaseState(codec) { 6577} 6578 6579void ACodec::IdleToExecutingState::stateEntered() { 6580 ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str()); 6581} 6582 6583bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6584 switch (msg->what()) { 6585 case kWhatSetParameters: 6586 case kWhatShutdown: 6587 { 6588 
mCodec->deferMessage(msg); 6589 return true; 6590 } 6591 6592 case kWhatResume: 6593 { 6594 // We'll be active soon enough. 6595 return true; 6596 } 6597 6598 case kWhatFlush: 6599 { 6600 // We haven't even started yet, so we're flushed alright... 6601 sp<AMessage> notify = mCodec->mNotify->dup(); 6602 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6603 notify->post(); 6604 6605 return true; 6606 } 6607 6608 case kWhatSignalEndOfInputStream: 6609 { 6610 mCodec->onSignalEndOfInputStream(); 6611 return true; 6612 } 6613 6614 default: 6615 return BaseState::onMessageReceived(msg); 6616 } 6617} 6618 6619bool ACodec::IdleToExecutingState::onOMXEvent( 6620 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6621 switch (event) { 6622 case OMX_EventCmdComplete: 6623 { 6624 if (data1 != (OMX_U32)OMX_CommandStateSet 6625 || data2 != (OMX_U32)OMX_StateExecuting) { 6626 ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)", 6627 asString((OMX_COMMANDTYPE)data1), data1, 6628 asString((OMX_STATETYPE)data2), data2); 6629 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6630 return true; 6631 } 6632 6633 mCodec->mExecutingState->resume(); 6634 mCodec->changeState(mCodec->mExecutingState); 6635 6636 return true; 6637 } 6638 6639 default: 6640 return BaseState::onOMXEvent(event, data1, data2); 6641 } 6642} 6643 6644//////////////////////////////////////////////////////////////////////////////// 6645 6646ACodec::ExecutingState::ExecutingState(ACodec *codec) 6647 : BaseState(codec), 6648 mActive(false) { 6649} 6650 6651ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( 6652 OMX_U32 /* portIndex */) { 6653 return RESUBMIT_BUFFERS; 6654} 6655 6656void ACodec::ExecutingState::submitOutputMetaBuffers() { 6657 // submit as many buffers as there are input buffers with the codec 6658 // in case we are in port reconfiguring 6659 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 6660 BufferInfo *info = 
&mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6661 6662 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 6663 if (mCodec->submitOutputMetadataBuffer() != OK) 6664 break; 6665 } 6666 } 6667 6668 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 6669 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 6670} 6671 6672void ACodec::ExecutingState::submitRegularOutputBuffers() { 6673 bool failed = false; 6674 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 6675 BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i); 6676 6677 if (mCodec->mNativeWindow != NULL) { 6678 if (info->mStatus != BufferInfo::OWNED_BY_US 6679 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6680 ALOGE("buffers should be owned by us or the surface"); 6681 failed = true; 6682 break; 6683 } 6684 6685 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6686 continue; 6687 } 6688 } else { 6689 if (info->mStatus != BufferInfo::OWNED_BY_US) { 6690 ALOGE("buffers should be owned by us"); 6691 failed = true; 6692 break; 6693 } 6694 } 6695 6696 ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID); 6697 6698 info->checkWriteFence("submitRegularOutputBuffers"); 6699 status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 6700 info->mFenceFd = -1; 6701 if (err != OK) { 6702 failed = true; 6703 break; 6704 } 6705 6706 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6707 } 6708 6709 if (failed) { 6710 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6711 } 6712} 6713 6714void ACodec::ExecutingState::submitOutputBuffers() { 6715 submitRegularOutputBuffers(); 6716 if (mCodec->storingMetadataInDecodedBuffers()) { 6717 submitOutputMetaBuffers(); 6718 } 6719} 6720 6721void ACodec::ExecutingState::resume() { 6722 if (mActive) { 6723 ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str()); 6724 return; 6725 } 6726 6727 
submitOutputBuffers(); 6728 6729 // Post all available input buffers 6730 if (mCodec->mBuffers[kPortIndexInput].size() == 0u) { 6731 ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str()); 6732 } 6733 6734 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) { 6735 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6736 if (info->mStatus == BufferInfo::OWNED_BY_US) { 6737 postFillThisBuffer(info); 6738 } 6739 } 6740 6741 mActive = true; 6742} 6743 6744void ACodec::ExecutingState::stateEntered() { 6745 ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); 6746 6747 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 6748 mCodec->processDeferredMessages(); 6749} 6750 6751bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6752 bool handled = false; 6753 6754 switch (msg->what()) { 6755 case kWhatShutdown: 6756 { 6757 int32_t keepComponentAllocated; 6758 CHECK(msg->findInt32( 6759 "keepComponentAllocated", &keepComponentAllocated)); 6760 6761 mCodec->mShutdownInProgress = true; 6762 mCodec->mExplicitShutdown = true; 6763 mCodec->mKeepComponentAllocated = keepComponentAllocated; 6764 6765 mActive = false; 6766 6767 status_t err = mCodec->mOMX->sendCommand( 6768 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 6769 if (err != OK) { 6770 if (keepComponentAllocated) { 6771 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6772 } 6773 // TODO: do some recovery here. 
6774 } else { 6775 mCodec->changeState(mCodec->mExecutingToIdleState); 6776 } 6777 6778 handled = true; 6779 break; 6780 } 6781 6782 case kWhatFlush: 6783 { 6784 ALOGV("[%s] ExecutingState flushing now " 6785 "(codec owns %zu/%zu input, %zu/%zu output).", 6786 mCodec->mComponentName.c_str(), 6787 mCodec->countBuffersOwnedByComponent(kPortIndexInput), 6788 mCodec->mBuffers[kPortIndexInput].size(), 6789 mCodec->countBuffersOwnedByComponent(kPortIndexOutput), 6790 mCodec->mBuffers[kPortIndexOutput].size()); 6791 6792 mActive = false; 6793 6794 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL); 6795 if (err != OK) { 6796 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6797 } else { 6798 mCodec->changeState(mCodec->mFlushingState); 6799 } 6800 6801 handled = true; 6802 break; 6803 } 6804 6805 case kWhatResume: 6806 { 6807 resume(); 6808 6809 handled = true; 6810 break; 6811 } 6812 6813 case kWhatRequestIDRFrame: 6814 { 6815 status_t err = mCodec->requestIDRFrame(); 6816 if (err != OK) { 6817 ALOGW("Requesting an IDR frame failed."); 6818 } 6819 6820 handled = true; 6821 break; 6822 } 6823 6824 case kWhatSetParameters: 6825 { 6826 sp<AMessage> params; 6827 CHECK(msg->findMessage("params", ¶ms)); 6828 6829 status_t err = mCodec->setParameters(params); 6830 6831 sp<AMessage> reply; 6832 if (msg->findMessage("reply", &reply)) { 6833 reply->setInt32("err", err); 6834 reply->post(); 6835 } 6836 6837 handled = true; 6838 break; 6839 } 6840 6841 case ACodec::kWhatSignalEndOfInputStream: 6842 { 6843 mCodec->onSignalEndOfInputStream(); 6844 handled = true; 6845 break; 6846 } 6847 6848 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 6849 case kWhatSubmitOutputMetadataBufferIfEOS: 6850 { 6851 if (mCodec->mPortEOS[kPortIndexInput] && 6852 !mCodec->mPortEOS[kPortIndexOutput]) { 6853 status_t err = mCodec->submitOutputMetadataBuffer(); 6854 if (err == OK) { 6855 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 6856 } 
6857 } 6858 return true; 6859 } 6860 6861 default: 6862 handled = BaseState::onMessageReceived(msg); 6863 break; 6864 } 6865 6866 return handled; 6867} 6868 6869status_t ACodec::setParameters(const sp<AMessage> ¶ms) { 6870 int32_t videoBitrate; 6871 if (params->findInt32("video-bitrate", &videoBitrate)) { 6872 OMX_VIDEO_CONFIG_BITRATETYPE configParams; 6873 InitOMXParams(&configParams); 6874 configParams.nPortIndex = kPortIndexOutput; 6875 configParams.nEncodeBitrate = videoBitrate; 6876 6877 status_t err = mOMX->setConfig( 6878 mNode, 6879 OMX_IndexConfigVideoBitrate, 6880 &configParams, 6881 sizeof(configParams)); 6882 6883 if (err != OK) { 6884 ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d", 6885 videoBitrate, err); 6886 6887 return err; 6888 } 6889 } 6890 6891 int64_t skipFramesBeforeUs; 6892 if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) { 6893 status_t err = 6894 mOMX->setInternalOption( 6895 mNode, 6896 kPortIndexInput, 6897 IOMX::INTERNAL_OPTION_START_TIME, 6898 &skipFramesBeforeUs, 6899 sizeof(skipFramesBeforeUs)); 6900 6901 if (err != OK) { 6902 ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err); 6903 return err; 6904 } 6905 } 6906 6907 int32_t dropInputFrames; 6908 if (params->findInt32("drop-input-frames", &dropInputFrames)) { 6909 bool suspend = dropInputFrames != 0; 6910 6911 status_t err = 6912 mOMX->setInternalOption( 6913 mNode, 6914 kPortIndexInput, 6915 IOMX::INTERNAL_OPTION_SUSPEND, 6916 &suspend, 6917 sizeof(suspend)); 6918 6919 if (err != OK) { 6920 ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err); 6921 return err; 6922 } 6923 } 6924 6925 int32_t dummy; 6926 if (params->findInt32("request-sync", &dummy)) { 6927 status_t err = requestIDRFrame(); 6928 6929 if (err != OK) { 6930 ALOGE("Requesting a sync frame failed w/ err %d", err); 6931 return err; 6932 } 6933 } 6934 6935 float rate; 6936 if (params->findFloat("operating-rate", &rate) && rate > 0) { 6937 status_t err = 
setOperatingRate(rate, mIsVideo); 6938 if (err != OK) { 6939 ALOGE("Failed to set parameter 'operating-rate' (err %d)", err); 6940 return err; 6941 } 6942 } 6943 6944 int32_t intraRefreshPeriod = 0; 6945 if (params->findInt32("intra-refresh-period", &intraRefreshPeriod) 6946 && intraRefreshPeriod > 0) { 6947 status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false); 6948 if (err != OK) { 6949 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 6950 mComponentName.c_str()); 6951 err = OK; 6952 } 6953 } 6954 6955 return OK; 6956} 6957 6958void ACodec::onSignalEndOfInputStream() { 6959 sp<AMessage> notify = mNotify->dup(); 6960 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); 6961 6962 status_t err = mOMX->signalEndOfInputStream(mNode); 6963 if (err != OK) { 6964 notify->setInt32("err", err); 6965 } 6966 notify->post(); 6967} 6968 6969bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 6970 mCodec->onFrameRendered(mediaTimeUs, systemNano); 6971 return true; 6972} 6973 6974bool ACodec::ExecutingState::onOMXEvent( 6975 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6976 switch (event) { 6977 case OMX_EventPortSettingsChanged: 6978 { 6979 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 6980 6981 mCodec->onOutputFormatChanged(); 6982 6983 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { 6984 mCodec->mMetadataBuffersToSubmit = 0; 6985 CHECK_EQ(mCodec->mOMX->sendCommand( 6986 mCodec->mNode, 6987 OMX_CommandPortDisable, kPortIndexOutput), 6988 (status_t)OK); 6989 6990 mCodec->freeOutputBuffersNotOwnedByComponent(); 6991 6992 mCodec->changeState(mCodec->mOutputPortSettingsChangedState); 6993 } else if (data2 != OMX_IndexConfigCommonOutputCrop 6994 && data2 != OMX_IndexConfigAndroidIntraRefresh) { 6995 ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x", 6996 mCodec->mComponentName.c_str(), data2); 6997 } 6998 6999 return true; 7000 } 7001 7002 case OMX_EventBufferFlag: 7003 { 
7004 return true; 7005 } 7006 7007 default: 7008 return BaseState::onOMXEvent(event, data1, data2); 7009 } 7010} 7011 7012//////////////////////////////////////////////////////////////////////////////// 7013 7014ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState( 7015 ACodec *codec) 7016 : BaseState(codec) { 7017} 7018 7019ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode( 7020 OMX_U32 portIndex) { 7021 if (portIndex == kPortIndexOutput) { 7022 return FREE_BUFFERS; 7023 } 7024 7025 CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput); 7026 7027 return RESUBMIT_BUFFERS; 7028} 7029 7030bool ACodec::OutputPortSettingsChangedState::onMessageReceived( 7031 const sp<AMessage> &msg) { 7032 bool handled = false; 7033 7034 switch (msg->what()) { 7035 case kWhatFlush: 7036 case kWhatShutdown: 7037 case kWhatResume: 7038 case kWhatSetParameters: 7039 { 7040 if (msg->what() == kWhatResume) { 7041 ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str()); 7042 } 7043 7044 mCodec->deferMessage(msg); 7045 handled = true; 7046 break; 7047 } 7048 7049 default: 7050 handled = BaseState::onMessageReceived(msg); 7051 break; 7052 } 7053 7054 return handled; 7055} 7056 7057void ACodec::OutputPortSettingsChangedState::stateEntered() { 7058 ALOGV("[%s] Now handling output port settings change", 7059 mCodec->mComponentName.c_str()); 7060} 7061 7062bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered( 7063 int64_t mediaTimeUs, nsecs_t systemNano) { 7064 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7065 return true; 7066} 7067 7068bool ACodec::OutputPortSettingsChangedState::onOMXEvent( 7069 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7070 switch (event) { 7071 case OMX_EventCmdComplete: 7072 { 7073 if (data1 == (OMX_U32)OMX_CommandPortDisable) { 7074 if (data2 != (OMX_U32)kPortIndexOutput) { 7075 ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2); 7076 return false; 7077 } 7078 7079 ALOGV("[%s] 
Output port now disabled.", mCodec->mComponentName.c_str()); 7080 7081 status_t err = OK; 7082 if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) { 7083 ALOGE("disabled port should be empty, but has %zu buffers", 7084 mCodec->mBuffers[kPortIndexOutput].size()); 7085 err = FAILED_TRANSACTION; 7086 } else { 7087 mCodec->mDealer[kPortIndexOutput].clear(); 7088 } 7089 7090 if (err == OK) { 7091 err = mCodec->mOMX->sendCommand( 7092 mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput); 7093 } 7094 7095 if (err == OK) { 7096 err = mCodec->allocateBuffersOnPort(kPortIndexOutput); 7097 ALOGE_IF(err != OK, "Failed to allocate output port buffers after port " 7098 "reconfiguration: (%d)", err); 7099 } 7100 7101 if (err != OK) { 7102 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 7103 7104 // This is technically not correct, but appears to be 7105 // the only way to free the component instance. 7106 // Controlled transitioning from excecuting->idle 7107 // and idle->loaded seem impossible probably because 7108 // the output port never finishes re-enabling. 
7109 mCodec->mShutdownInProgress = true; 7110 mCodec->mKeepComponentAllocated = false; 7111 mCodec->changeState(mCodec->mLoadedState); 7112 } 7113 7114 return true; 7115 } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { 7116 if (data2 != (OMX_U32)kPortIndexOutput) { 7117 ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2); 7118 return false; 7119 } 7120 7121 ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str()); 7122 7123 if (mCodec->mExecutingState->active()) { 7124 mCodec->mExecutingState->submitOutputBuffers(); 7125 } 7126 7127 mCodec->changeState(mCodec->mExecutingState); 7128 7129 return true; 7130 } 7131 7132 return false; 7133 } 7134 7135 default: 7136 return false; 7137 } 7138} 7139 7140//////////////////////////////////////////////////////////////////////////////// 7141 7142ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec) 7143 : BaseState(codec), 7144 mComponentNowIdle(false) { 7145} 7146 7147bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) { 7148 bool handled = false; 7149 7150 switch (msg->what()) { 7151 case kWhatFlush: 7152 { 7153 // Don't send me a flush request if you previously wanted me 7154 // to shutdown. 7155 ALOGW("Ignoring flush request in ExecutingToIdleState"); 7156 break; 7157 } 7158 7159 case kWhatShutdown: 7160 { 7161 // We're already doing that... 
7162 7163 handled = true; 7164 break; 7165 } 7166 7167 default: 7168 handled = BaseState::onMessageReceived(msg); 7169 break; 7170 } 7171 7172 return handled; 7173} 7174 7175void ACodec::ExecutingToIdleState::stateEntered() { 7176 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); 7177 7178 mComponentNowIdle = false; 7179 mCodec->mLastOutputFormat.clear(); 7180} 7181 7182bool ACodec::ExecutingToIdleState::onOMXEvent( 7183 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7184 switch (event) { 7185 case OMX_EventCmdComplete: 7186 { 7187 if (data1 != (OMX_U32)OMX_CommandStateSet 7188 || data2 != (OMX_U32)OMX_StateIdle) { 7189 ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)", 7190 asString((OMX_COMMANDTYPE)data1), data1, 7191 asString((OMX_STATETYPE)data2), data2); 7192 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7193 return true; 7194 } 7195 7196 mComponentNowIdle = true; 7197 7198 changeStateIfWeOwnAllBuffers(); 7199 7200 return true; 7201 } 7202 7203 case OMX_EventPortSettingsChanged: 7204 case OMX_EventBufferFlag: 7205 { 7206 // We're shutting down and don't care about this anymore. 7207 return true; 7208 } 7209 7210 default: 7211 return BaseState::onOMXEvent(event, data1, data2); 7212 } 7213} 7214 7215void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { 7216 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { 7217 status_t err = mCodec->mOMX->sendCommand( 7218 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 7219 if (err == OK) { 7220 err = mCodec->freeBuffersOnPort(kPortIndexInput); 7221 status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput); 7222 if (err == OK) { 7223 err = err2; 7224 } 7225 } 7226 7227 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) 7228 && mCodec->mNativeWindow != NULL) { 7229 // We push enough 1x1 blank buffers to ensure that one of 7230 // them has made it to the display. 
This allows the OMX 7231 // component teardown to zero out any protected buffers 7232 // without the risk of scanning out one of those buffers. 7233 pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get()); 7234 } 7235 7236 if (err != OK) { 7237 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7238 return; 7239 } 7240 7241 mCodec->changeState(mCodec->mIdleToLoadedState); 7242 } 7243} 7244 7245void ACodec::ExecutingToIdleState::onInputBufferFilled( 7246 const sp<AMessage> &msg) { 7247 BaseState::onInputBufferFilled(msg); 7248 7249 changeStateIfWeOwnAllBuffers(); 7250} 7251 7252void ACodec::ExecutingToIdleState::onOutputBufferDrained( 7253 const sp<AMessage> &msg) { 7254 BaseState::onOutputBufferDrained(msg); 7255 7256 changeStateIfWeOwnAllBuffers(); 7257} 7258 7259//////////////////////////////////////////////////////////////////////////////// 7260 7261ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) 7262 : BaseState(codec) { 7263} 7264 7265bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { 7266 bool handled = false; 7267 7268 switch (msg->what()) { 7269 case kWhatShutdown: 7270 { 7271 // We're already doing that... 7272 7273 handled = true; 7274 break; 7275 } 7276 7277 case kWhatFlush: 7278 { 7279 // Don't send me a flush request if you previously wanted me 7280 // to shutdown. 
7281 ALOGE("Got flush request in IdleToLoadedState"); 7282 break; 7283 } 7284 7285 default: 7286 handled = BaseState::onMessageReceived(msg); 7287 break; 7288 } 7289 7290 return handled; 7291} 7292 7293void ACodec::IdleToLoadedState::stateEntered() { 7294 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7295} 7296 7297bool ACodec::IdleToLoadedState::onOMXEvent( 7298 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7299 switch (event) { 7300 case OMX_EventCmdComplete: 7301 { 7302 if (data1 != (OMX_U32)OMX_CommandStateSet 7303 || data2 != (OMX_U32)OMX_StateLoaded) { 7304 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7305 asString((OMX_COMMANDTYPE)data1), data1, 7306 asString((OMX_STATETYPE)data2), data2); 7307 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7308 return true; 7309 } 7310 7311 mCodec->changeState(mCodec->mLoadedState); 7312 7313 return true; 7314 } 7315 7316 default: 7317 return BaseState::onOMXEvent(event, data1, data2); 7318 } 7319} 7320 7321//////////////////////////////////////////////////////////////////////////////// 7322 7323ACodec::FlushingState::FlushingState(ACodec *codec) 7324 : BaseState(codec) { 7325} 7326 7327void ACodec::FlushingState::stateEntered() { 7328 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7329 7330 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7331} 7332 7333bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7334 bool handled = false; 7335 7336 switch (msg->what()) { 7337 case kWhatShutdown: 7338 { 7339 mCodec->deferMessage(msg); 7340 break; 7341 } 7342 7343 case kWhatFlush: 7344 { 7345 // We're already doing this right now. 
7346 handled = true; 7347 break; 7348 } 7349 7350 default: 7351 handled = BaseState::onMessageReceived(msg); 7352 break; 7353 } 7354 7355 return handled; 7356} 7357 7358bool ACodec::FlushingState::onOMXEvent( 7359 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7360 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7361 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7362 7363 switch (event) { 7364 case OMX_EventCmdComplete: 7365 { 7366 if (data1 != (OMX_U32)OMX_CommandFlush) { 7367 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7368 asString((OMX_COMMANDTYPE)data1), data1, data2); 7369 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7370 return true; 7371 } 7372 7373 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7374 if (mFlushComplete[data2]) { 7375 ALOGW("Flush already completed for %s port", 7376 data2 == kPortIndexInput ? "input" : "output"); 7377 return true; 7378 } 7379 mFlushComplete[data2] = true; 7380 7381 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7382 changeStateIfWeOwnAllBuffers(); 7383 } 7384 } else if (data2 == OMX_ALL) { 7385 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7386 ALOGW("received flush complete event for OMX_ALL before ports have been" 7387 "flushed (%d/%d)", 7388 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7389 return false; 7390 } 7391 7392 changeStateIfWeOwnAllBuffers(); 7393 } else { 7394 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7395 } 7396 7397 return true; 7398 } 7399 7400 case OMX_EventPortSettingsChanged: 7401 { 7402 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7403 msg->setInt32("type", omx_message::EVENT); 7404 msg->setInt32("node", mCodec->mNode); 7405 msg->setInt32("event", event); 7406 msg->setInt32("data1", data1); 7407 msg->setInt32("data2", data2); 7408 7409 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7410 
mCodec->mComponentName.c_str()); 7411 7412 mCodec->deferMessage(msg); 7413 7414 return true; 7415 } 7416 7417 default: 7418 return BaseState::onOMXEvent(event, data1, data2); 7419 } 7420 7421 return true; 7422} 7423 7424void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7425 BaseState::onOutputBufferDrained(msg); 7426 7427 changeStateIfWeOwnAllBuffers(); 7428} 7429 7430void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7431 BaseState::onInputBufferFilled(msg); 7432 7433 changeStateIfWeOwnAllBuffers(); 7434} 7435 7436void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7437 if (mFlushComplete[kPortIndexInput] 7438 && mFlushComplete[kPortIndexOutput] 7439 && mCodec->allYourBuffersAreBelongToUs()) { 7440 // We now own all buffers except possibly those still queued with 7441 // the native window for rendering. Let's get those back as well. 7442 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7443 7444 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7445 7446 sp<AMessage> notify = mCodec->mNotify->dup(); 7447 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7448 notify->post(); 7449 7450 mCodec->mPortEOS[kPortIndexInput] = 7451 mCodec->mPortEOS[kPortIndexOutput] = false; 7452 7453 mCodec->mInputEOSResult = OK; 7454 7455 if (mCodec->mSkipCutBuffer != NULL) { 7456 mCodec->mSkipCutBuffer->clear(); 7457 } 7458 7459 mCodec->changeState(mCodec->mExecutingState); 7460 } 7461} 7462 7463status_t ACodec::queryCapabilities( 7464 const AString &name, const AString &mime, bool isEncoder, 7465 sp<MediaCodecInfo::Capabilities> *caps) { 7466 (*caps).clear(); 7467 const char *role = getComponentRole(isEncoder, mime.c_str()); 7468 if (role == NULL) { 7469 return BAD_VALUE; 7470 } 7471 7472 OMXClient client; 7473 status_t err = client.connect(); 7474 if (err != OK) { 7475 return err; 7476 } 7477 7478 sp<IOMX> omx = client.interface(); 7479 sp<CodecObserver> observer = new CodecObserver; 7480 
IOMX::node_id node = 0; 7481 7482 err = omx->allocateNode(name.c_str(), observer, NULL, &node); 7483 if (err != OK) { 7484 client.disconnect(); 7485 return err; 7486 } 7487 7488 err = setComponentRole(omx, node, role); 7489 if (err != OK) { 7490 omx->freeNode(node); 7491 client.disconnect(); 7492 return err; 7493 } 7494 7495 sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder(); 7496 bool isVideo = mime.startsWithIgnoreCase("video/"); 7497 7498 if (isVideo) { 7499 OMX_VIDEO_PARAM_PROFILELEVELTYPE param; 7500 InitOMXParams(¶m); 7501 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7502 7503 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7504 param.nProfileIndex = index; 7505 status_t err = omx->getParameter( 7506 node, OMX_IndexParamVideoProfileLevelQuerySupported, 7507 ¶m, sizeof(param)); 7508 if (err != OK) { 7509 break; 7510 } 7511 builder->addProfileLevel(param.eProfile, param.eLevel); 7512 7513 if (index == kMaxIndicesToCheck) { 7514 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 7515 name.c_str(), index, 7516 param.eProfile, param.eLevel); 7517 } 7518 } 7519 7520 // Color format query 7521 // return colors in the order reported by the OMX component 7522 // prefix "flexible" standard ones with the flexible equivalent 7523 OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat; 7524 InitOMXParams(&portFormat); 7525 portFormat.nPortIndex = isEncoder ? 
kPortIndexInput : kPortIndexOutput; 7526 Vector<uint32_t> supportedColors; // shadow copy to check for duplicates 7527 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7528 portFormat.nIndex = index; 7529 status_t err = omx->getParameter( 7530 node, OMX_IndexParamVideoPortFormat, 7531 &portFormat, sizeof(portFormat)); 7532 if (err != OK) { 7533 break; 7534 } 7535 7536 OMX_U32 flexibleEquivalent; 7537 if (isFlexibleColorFormat( 7538 omx, node, portFormat.eColorFormat, false /* usingNativeWindow */, 7539 &flexibleEquivalent)) { 7540 bool marked = false; 7541 for (size_t i = 0; i < supportedColors.size(); ++i) { 7542 if (supportedColors[i] == flexibleEquivalent) { 7543 marked = true; 7544 break; 7545 } 7546 } 7547 if (!marked) { 7548 supportedColors.push(flexibleEquivalent); 7549 builder->addColorFormat(flexibleEquivalent); 7550 } 7551 } 7552 supportedColors.push(portFormat.eColorFormat); 7553 builder->addColorFormat(portFormat.eColorFormat); 7554 7555 if (index == kMaxIndicesToCheck) { 7556 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 7557 name.c_str(), index, 7558 asString(portFormat.eColorFormat), portFormat.eColorFormat); 7559 } 7560 } 7561 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) { 7562 // More audio codecs if they have profiles. 7563 OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param; 7564 InitOMXParams(¶m); 7565 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7566 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7567 param.nProfileIndex = index; 7568 status_t err = omx->getParameter( 7569 node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported, 7570 ¶m, sizeof(param)); 7571 if (err != OK) { 7572 break; 7573 } 7574 // For audio, level is ignored. 
7575 builder->addProfileLevel(param.eProfile, 0 /* level */); 7576 7577 if (index == kMaxIndicesToCheck) { 7578 ALOGW("[%s] stopping checking profiles after %u: %x", 7579 name.c_str(), index, 7580 param.eProfile); 7581 } 7582 } 7583 7584 // NOTE: Without Android extensions, OMX does not provide a way to query 7585 // AAC profile support 7586 if (param.nProfileIndex == 0) { 7587 ALOGW("component %s doesn't support profile query.", name.c_str()); 7588 } 7589 } 7590 7591 if (isVideo && !isEncoder) { 7592 native_handle_t *sidebandHandle = NULL; 7593 if (omx->configureVideoTunnelMode( 7594 node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) { 7595 // tunneled playback includes adaptive playback 7596 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback 7597 | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback); 7598 } else if (omx->storeMetaDataInBuffers( 7599 node, kPortIndexOutput, OMX_TRUE) == OK || 7600 omx->prepareForAdaptivePlayback( 7601 node, kPortIndexOutput, OMX_TRUE, 7602 1280 /* width */, 720 /* height */) == OK) { 7603 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback); 7604 } 7605 } 7606 7607 if (isVideo && isEncoder) { 7608 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 7609 InitOMXParams(¶ms); 7610 params.nPortIndex = kPortIndexOutput; 7611 // TODO: should we verify if fallback is supported? 7612 if (omx->getConfig( 7613 node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, 7614 ¶ms, sizeof(params)) == OK) { 7615 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh); 7616 } 7617 } 7618 7619 *caps = builder; 7620 omx->freeNode(node); 7621 client.disconnect(); 7622 return OK; 7623} 7624 7625// These are supposed be equivalent to the logic in 7626// "audio_channel_out_mask_from_count". 
7627//static 7628status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7629 switch (numChannels) { 7630 case 1: 7631 map[0] = OMX_AUDIO_ChannelCF; 7632 break; 7633 case 2: 7634 map[0] = OMX_AUDIO_ChannelLF; 7635 map[1] = OMX_AUDIO_ChannelRF; 7636 break; 7637 case 3: 7638 map[0] = OMX_AUDIO_ChannelLF; 7639 map[1] = OMX_AUDIO_ChannelRF; 7640 map[2] = OMX_AUDIO_ChannelCF; 7641 break; 7642 case 4: 7643 map[0] = OMX_AUDIO_ChannelLF; 7644 map[1] = OMX_AUDIO_ChannelRF; 7645 map[2] = OMX_AUDIO_ChannelLR; 7646 map[3] = OMX_AUDIO_ChannelRR; 7647 break; 7648 case 5: 7649 map[0] = OMX_AUDIO_ChannelLF; 7650 map[1] = OMX_AUDIO_ChannelRF; 7651 map[2] = OMX_AUDIO_ChannelCF; 7652 map[3] = OMX_AUDIO_ChannelLR; 7653 map[4] = OMX_AUDIO_ChannelRR; 7654 break; 7655 case 6: 7656 map[0] = OMX_AUDIO_ChannelLF; 7657 map[1] = OMX_AUDIO_ChannelRF; 7658 map[2] = OMX_AUDIO_ChannelCF; 7659 map[3] = OMX_AUDIO_ChannelLFE; 7660 map[4] = OMX_AUDIO_ChannelLR; 7661 map[5] = OMX_AUDIO_ChannelRR; 7662 break; 7663 case 7: 7664 map[0] = OMX_AUDIO_ChannelLF; 7665 map[1] = OMX_AUDIO_ChannelRF; 7666 map[2] = OMX_AUDIO_ChannelCF; 7667 map[3] = OMX_AUDIO_ChannelLFE; 7668 map[4] = OMX_AUDIO_ChannelLR; 7669 map[5] = OMX_AUDIO_ChannelRR; 7670 map[6] = OMX_AUDIO_ChannelCS; 7671 break; 7672 case 8: 7673 map[0] = OMX_AUDIO_ChannelLF; 7674 map[1] = OMX_AUDIO_ChannelRF; 7675 map[2] = OMX_AUDIO_ChannelCF; 7676 map[3] = OMX_AUDIO_ChannelLFE; 7677 map[4] = OMX_AUDIO_ChannelLR; 7678 map[5] = OMX_AUDIO_ChannelRR; 7679 map[6] = OMX_AUDIO_ChannelLS; 7680 map[7] = OMX_AUDIO_ChannelRS; 7681 break; 7682 default: 7683 return -EINVAL; 7684 } 7685 7686 return OK; 7687} 7688 7689} // namespace android 7690