ACodec.cpp revision ad60a0f6cd9012507f8086fe741236d12d280ef3
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47#include <media/OMXBuffer.h> 48 49#include <OMX_AudioExt.h> 50#include <OMX_VideoExt.h> 51#include <OMX_Component.h> 52#include <OMX_IndexExt.h> 53#include <OMX_AsString.h> 54 55#include "include/avc_utils.h" 56#include "include/DataConverter.h" 57#include "include/SecureBuffer.h" 58#include "include/SharedMemoryBuffer.h" 59#include "omx/OMXUtils.h" 60 61namespace android { 62 63using binder::Status; 64 65enum { 
    kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
};

// OMX errors are directly mapped into status_t range if
// there is no corresponding MediaError status code.
// Use the statusFromOMXError(int32_t omxError) function.
//
// Currently this is a direct map.
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors     from 0x90000000 - 0x9000FFFF
// Extension OMX errors  from 0x8F000000 - 0x90000000
// Standard OMX errors   from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
        default:
            // note: returns 0 (== OK) for unrecognized, non-OMX-range codes
            return isOMXError(omxError) ? omxError : 0; // no translation required
    }
}

// Extracts a status_t from a binder Status: OK, a service-specific error,
// a transaction error, or UNKNOWN_ERROR for any other exception.
static inline status_t statusFromBinderStatus(const Status &status) {
    if (status.isOk()) {
        return OK;
    }
    status_t err;
    if ((err = status.serviceSpecificErrorCode()) != OK) {
        return err;
    }
    if ((err = status.transactionError()) != OK) {
        return err;
    }
    // Other exception
    return UNKNOWN_ERROR;
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Refcounted wrapper around a list of AMessages so that a whole batch of
// OMX callbacks can travel as a single AMessage object payload.
struct MessageList : public RefBase {
    MessageList() {
    }
    virtual ~MessageList() {
    }
    std::list<sp<AMessage> > &getList() { return mList; }
private:
    std::list<sp<AMessage> > mList;

    DISALLOW_EVIL_CONSTRUCTORS(MessageList);
};

// Returns the process-wide plain-copy DataConverter, created exactly once
// in a thread-safe manner via pthread_once.
static sp<DataConverter> getCopyConverter() {
    static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited
    static sp<DataConverter> sCopyConverter; // zero-inited
    pthread_once(&once, [](){ sCopyConverter = new DataConverter(); });
    return sCopyConverter;
}

// IOMXObserver implementation: converts incoming omx_message callbacks into
// AMessages and posts the whole batch (as one MessageList) to the message
// set via setNotificationMessage().
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessages(const std::list<omx_message> &messages) {
        if (messages.empty()) {
            return;
        }

        sp<AMessage> notify = mNotify->dup();
        sp<MessageList> msgList = new MessageList();
        for (std::list<omx_message>::const_iterator it = messages.cbegin();
              it != messages.cend(); ++it) {
            const omx_message &omx_msg = *it;

            sp<AMessage> msg = new AMessage;
            msg->setInt32("type", omx_msg.type);
            switch (omx_msg.type) {
                case omx_message::EVENT:
                {
                    msg->setInt32("event", omx_msg.u.event_data.event);
                    msg->setInt32("data1", omx_msg.u.event_data.data1);
                    msg->setInt32("data2", omx_msg.u.event_data.data2);
                    break;
                }

                case omx_message::EMPTY_BUFFER_DONE:
                {
                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                    msg->setInt32("fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FILL_BUFFER_DONE:
                {
                    msg->setInt32(
                            "buffer", omx_msg.u.extended_buffer_data.buffer);
                    msg->setInt32(
                            "range_offset",
                            omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base class for all ACodec state-machine states. Provides default
// OMX event/buffer handling that the concrete states override as needed.
struct ACodec::BaseState : public AState {
    explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    // How buffers returned on a port are treated while in this state.
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts the supplied notification message when the watched binder dies.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    explicit DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

// Initial state: no OMX component allocated yet; handles setup/allocation.
struct ACodec::UninitializedState : public ACodec::BaseState {
    explicit UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

// Component allocated (OMX "Loaded"); handles configure/start/input-surface.
struct ACodec::LoadedState : public ACodec::BaseState {
    explicit LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Loaded -> Idle; allocates buffers on both ports.
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    explicit LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Idle -> Executing.
struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    explicit IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Steady state: buffers flow between client, component and (optionally)
// the native window.
struct ACodec::ExecutingState : public ACodec::BaseState {
    explicit ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Output port is being reconfigured after a port-settings-changed event.
struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    explicit OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Executing -> Idle (shutdown path); waits until all
// buffers are back in our ownership.
struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    explicit ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: Idle -> Loaded (buffers being freed).
struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    explicit IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Flush in progress; tracks per-port flush completion before resuming.
struct ACodec::FlushingState : public ACodec::BaseState {
    explicit FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    bool mFlushComplete[2]; // indexed by kPortIndexInput / kPortIndexOutput

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Records fenceFd as the buffer's write fence. If a fence is already stored,
// the old fd is replaced (and logged) — NOTE(review): the previous fd appears
// to be dropped without close here; the warning tracks such overwrites.
void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = false;
}

// Records fenceFd as the buffer's read fence (same overwrite caveat as above).
void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = true;
}

// Debug-logs when a stored read fence is about to be consumed as a write fence.
void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
    if (mFenceFd >= 0 && mIsReadFence) {
        ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
    }
}

// Debug-logs when a stored write fence is about to be consumed as a read fence.
void ACodec::BufferInfo::checkReadFence(const char *dbg) {
    if (mFenceFd >= 0 && !mIsReadFence) {
        ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
    }
}

////////////////////////////////////////////////////////////////////////////////

// Constructs the codec in its initial configuration: zero/invalid member
// defaults, one instance of every state object, and transitions into
// UninitializedState.
ACodec::ACodec()
    : mSampleRate(0),
      mNodeGeneration(0),
      mUsingNativeWindow(false),
      mNativeWindowUsageBits(0),
      mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
      mIsVideo(false),
      mIsEncoder(false),
      mFatalError(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mIsLegacyVP9Decoder(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mInputMetadataType(kMetadataBufferTypeInvalid),
      mOutputMetadataType(kMetadataBufferTypeInvalid),
      mLegacyAdaptiveExperiment(false),
      mMetadataBuffersToSubmit(0),
      mNumUndequeuedBuffers(0),
      mRepeatFrameDelayUs(-1ll),
      mMaxPtsGapUs(-1ll),
      mMaxFps(-1),
      mTimePerFrameUs(-1ll),
      mTimePerCaptureUs(-1ll),
      mCreateInputBuffersSuspended(false),
      mTunneled(false),
      mDescribeColorAspectsIndex((OMX_INDEXTYPE)0),
      mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) {
    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));

    changeState(mUninitializedState);
}

ACodec::~ACodec() {
}

void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}

// The initiate*/signal* methods below are thin async wrappers: each retargets
// or creates an AMessage and posts it to this ACodec's looper thread.

void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(this);
    msg->post();
}

void ACodec::signalSetParameters(const sp<AMessage> &params) {
    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
    msg->setMessage("params", params);
    msg->post();
}

void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent);
    msg->setTarget(this);
    msg->post();
}

void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(this);
    msg->post();
}

// Synchronous: posts kWhatSetSurface and waits for the handler's reply,
// returning the error it stored under "err".
status_t ACodec::setSurface(const sp<Surface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
    msg->setObject("surface", surface);

    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err == OK) {
        (void)response->findInt32("err", &err);
    }
    return err;
}

void ACodec::initiateCreateInputSurface() {
    (new AMessage(kWhatCreateInputSurface, this))->post();
}

void ACodec::initiateSetInputSurface(
        const sp<PersistentSurface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
    msg->setObject("input-surface", surface);
    msg->post();
}

void ACodec::signalEndOfInputStream() {
    (new AMessage(kWhatSignalEndOfInputStream, this))->post();
}

void ACodec::initiateStart() {
    (new AMessage(kWhatStart, this))->post();
}

void ACodec::signalFlush() {
    ALOGV("[%s] signalFlush", mComponentName.c_str());
    (new AMessage(kWhatFlush, this))->post();
}

void ACodec::signalResume() {
    (new AMessage(kWhatResume, this))->post();
}

void ACodec::initiateShutdown(bool keepComponentAllocated) {
    sp<AMessage> msg = new AMessage(kWhatShutdown, this);
    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
    msg->post();
    if (!keepComponentAllocated) {
        // ensure shutdown completes in 3 seconds
        (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
    }
}

void ACodec::signalRequestIDRFrame() {
    (new AMessage(kWhatRequestIDRFrame, this))->post();
}

// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port.  For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
// Part of the workaround described above: if input has seen EOS but output
// has not, and we still hold metadata buffers to submit, nudge the handler.
void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetadataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
    }
}

// Switches the output surface, possibly while the codec is running.
// Validates that the switch is legal (a surface was configured, not tunneled,
// no new usage bits, enough buffer slots on the new surface), then migrates
// the already-allocated output buffers to the new surface via attachBuffer()
// and cancels the undequeued ones back to it.
status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
    // allow keeping unset surface
    if (surface == NULL) {
        if (mNativeWindow != NULL) {
            ALOGW("cannot unset a surface");
            return INVALID_OPERATION;
        }
        return OK;
    }

    // cannot switch from bytebuffers to surface
    if (mNativeWindow == NULL) {
        ALOGW("component was not configured with a surface");
        return INVALID_OPERATION;
    }

    ANativeWindow *nativeWindow = surface.get();
    // if we have not yet started the codec, we can simply set the native window
    if (mBuffers[kPortIndexInput].size() == 0) {
        mNativeWindow = surface;
        return OK;
    }

    // we do not support changing a tunneled surface after start
    if (mTunneled) {
        ALOGW("cannot change tunneled surface");
        return INVALID_OPERATION;
    }

    int usageBits = 0;
    // no need to reconnect as we will not dequeue all buffers
    status_t err = setupNativeWindowSizeFormatAndUsage(
            nativeWindow, &usageBits,
            !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */);
    if (err != OK) {
        return err;
    }

    int ignoredFlags = kVideoGrallocUsage;
    // New output surface is not allowed to add new usage flag except ignored ones.
    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
        return BAD_VALUE;
    }

    // get min undequeued count. We cannot switch to a surface that has a higher
    // undequeued count than we allocated.
    int minUndequeuedBuffers = 0;
    err = nativeWindow->query(
            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &minUndequeuedBuffers);
    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }
    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
                minUndequeuedBuffers, mNumUndequeuedBuffers);
        return BAD_VALUE;
    }

    // we cannot change the number of output buffers while OMX is running
    // set up surface to the same count
    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
    ALOGV("setting up surface for %zu buffers", buffers.size());

    err = native_window_set_buffer_count(nativeWindow, buffers.size());
    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    // need to enable allocation when attaching
    surface->getIGraphicBufferProducer()->allowAllocation(true);

    // for meta data mode, we move dequeued buffers to the new surface.
    // for non-meta mode, we must move all registered buffers
    for (size_t i = 0; i < buffers.size(); ++i) {
        const BufferInfo &info = buffers[i];
        // skip undequeued buffers for meta data mode
        if (storingMetadataInDecodedBuffers()
                && !mLegacyAdaptiveExperiment
                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("skipping buffer");
            continue;
        }
        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());

        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
        if (err != OK) {
            ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
                    info.mGraphicBuffer->getNativeBuffer(),
                    strerror(-err), -err);
            return err;
        }
    }

    // cancel undequeued buffers to new surface
    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            BufferInfo &info = buffers.editItemAt(i);
            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                err = nativeWindow->cancelBuffer(
                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
                info.mFenceFd = -1; // fence ownership passed to cancelBuffer
                if (err != OK) {
                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
                            info.mGraphicBuffer->getNativeBuffer(),
                            strerror(-err), -err);
                    return err;
                }
            }
        }
        // disallow further allocation
        (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // push blank buffers to previous window if requested
    if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
        pushBlankBuffersToNativeWindow(mNativeWindow.get());
    }

    mNativeWindow = nativeWindow;
    mNativeWindowUsageBits = usageBits;
    return OK;
}

// Allocates (or registers) all buffers for the given port. Output buffers
// backed by a native window are delegated to the native-window paths;
// otherwise buffers come from a MemoryDealer arena sized from the OMX port
// definition (with overflow guards, see b/22885421). On success the client
// is notified via kWhatBuffersAllocated with a PortDescription.
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    CHECK(mDealer[portIndex] == NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (storingMetadataInDecodedBuffers() && !mLegacyAdaptiveExperiment) {
            err = allocateOutputMetadataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMXNode->getParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            // In metadata modes the per-buffer size is the metadata struct,
            // not the component-reported nBufferSize.
            MetadataBufferType type =
                portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
            size_t bufSize = def.nBufferSize;
            if (type == kMetadataBufferTypeANWBuffer) {
                bufSize = sizeof(VideoNativeMetadata);
            } else if (type == kMetadataBufferTypeNativeHandleSource) {
                bufSize = sizeof(VideoNativeHandleMetadata);
            }

            // If using gralloc or native source input metadata buffers, allocate largest
            // metadata size as we prefer to generate native source metadata, but component
            // may require gralloc source. For camera source, allocate at least enough
            // size for native metadata buffers.
            size_t allottedSize = bufSize;
            if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) {
                bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
                bufSize = max(bufSize, sizeof(VideoNativeMetadata));
            }

            size_t conversionBufferSize = 0;

            sp<DataConverter> converter = mConverter[portIndex];
            if (converter != NULL) {
                // here we assume sane conversions of max 4:1, so result fits in int32
                if (portIndex == kPortIndexInput) {
                    conversionBufferSize = converter->sourceSize(bufSize);
                } else {
                    conversionBufferSize = converter->targetSize(bufSize);
                }
            }

            size_t alignment = MemoryDealer::getAllocationAlignment();

            ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
                    portIndex == kPortIndexInput ? "input" : "output");

            // verify buffer sizes to avoid overflow in align()
            if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            // don't modify bufSize as OMX may not expect it to increase after negotiation
            size_t alignedSize = align(bufSize, alignment);
            size_t alignedConvSize = align(conversionBufferSize, alignment);
            if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            const sp<AMessage> &format =
                    portIndex == kPortIndexInput ? mInputFormat : mOutputFormat;
            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
                if (mem == NULL || mem->pointer() == NULL) {
                    return NO_MEMORY;
                }

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;
                info.mFenceFd = -1;
                info.mRenderInfo = NULL;

                if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
                    // secure input: drop the shared memory and let the
                    // component allocate protected storage instead
                    mem.clear();

                    void *ptr = NULL;
                    sp<NativeHandle> native_handle;
                    err = mOMXNode->allocateSecureBuffer(
                            portIndex, bufSize, &info.mBufferID,
                            &ptr, &native_handle);

                    info.mData = (native_handle == NULL)
                            ? new SecureBuffer(format, ptr, bufSize)
                            : new SecureBuffer(format, native_handle, bufSize);
                    info.mCodecData = info.mData;
                } else {
                    err = mOMXNode->useBuffer(portIndex,
                            OMXBuffer(mem, allottedSize), &info.mBufferID);
                }

                if (mem != NULL) {
                    info.mCodecData = new SharedMemoryBuffer(format, mem);
                    info.mCodecRef = mem;

                    if (type == kMetadataBufferTypeANWBuffer) {
                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                    }

                    // if we require conversion, allocate conversion buffer for client use;
                    // otherwise, reuse codec buffer
                    if (mConverter[portIndex] != NULL) {
                        CHECK_GT(conversionBufferSize, (size_t)0);
                        mem = mDealer[portIndex]->allocate(conversionBufferSize);
                        if (mem == NULL || mem->pointer() == NULL) {
                            return NO_MEMORY;
                        }
                        info.mData = new SharedMemoryBuffer(format, mem);
                        info.mMemRef = mem;
                    } else {
                        info.mData = info.mCodecData;
                        info.mMemRef = info.mCodecRef;
                    }
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Configures the native window with the output port's frame size, color
// format and rotation, and with the combined gralloc usage bits
// (component-requested | protected-if-required | kVideoGrallocUsage).
// *finalUsage receives the combined usage bits.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */,
        bool reconnect) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMXNode->getGraphicBufferUsage(kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage,
            reconnect);
}

// Negotiates the output buffer count between the OMX component and the
// native window: queries the window's minimum undequeued count, then tries
// nBufferCountMin + minUndequeued + (3, 2, 1, 0) extra buffers in decreasing
// order until the component accepts, and applies the result to the window.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMXNode->setParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMXNode->setParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    // This method only handles the non-metadata mode, or legacy metadata mode
    // (where the headers for each buffer id will be fixed). Non-legacy metadata
    // mode shouldn't go through this path.
CHECK(!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment);

    // Negotiate buffer counts/sizes with the component and the window first.
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    // Temporarily allow the producer to allocate so dequeueBuffer below can
    // create new graphic buffers; disabled again after registration.
    static_cast<Surface*>(mNativeWindow.get())
            ->getIGraphicBufferProducer()->allowAllocation(true);

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = graphicBuffer;

        // TODO: We shouln't need to create MediaCodecBuffer. In metadata mode
        // OMX doesn't use the shared memory buffer, but some code still
        // access info.mData. Create an ABuffer as a placeholder.
        if (storingMetadataInDecodedBuffers()) {
            info.mData = new MediaCodecBuffer(mOutputFormat, new ABuffer(bufferSize));
            info.mCodecData = info.mData;
        }

        mBuffers[kPortIndexOutput].push(info);

        // Register the graphic buffer with the component; the returned id is
        // how the component refers to this buffer from now on.
        IOMX::buffer_id bufferId;
        err = mOMXNode->useBuffer(kPortIndexOutput, graphicBuffer, &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    // Decide which of the dequeued buffers must be returned (cancelled) to
    // the native window before streaming starts.
    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0 || storingMetadataInDecodedBuffers()) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued. Also cancel all if we're in legacy metadata mode.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // Preserve the first error; later ones are not reported.
            if (err == 0) {
                err = error;
            }
        }
    }

    static_cast<Surface*>(mNativeWindow.get())
            ->getIGraphicBufferProducer()->allowAllocation(false);

    if (storingMetadataInDecodedBuffers()) {
        mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    }

    return err;
}

// Allocates small shared-memory "metadata" buffers for the output port
// (non-legacy metadata mode). The actual graphic buffers are dequeued from
// the native window lazily; each metadata buffer merely carries a reference
// (gralloc handle or ANWBuffer pointer, depending on mOutputMetadataType).
status_t ACodec::allocateOutputMetadataBuffers() {
    CHECK(storingMetadataInDecodedBuffers() && !mLegacyAdaptiveExperiment);

    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers,
            false /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    // Size each slot for the metadata struct in use; one dealer serves all.
    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        // No graphic buffer attached yet: conceptually still with the window.
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // Mark "no fence" so the component does not wait on garbage.
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new SharedMemoryBuffer(mOutputFormat, mem);
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        err = mOMXNode->useBuffer(kPortIndexOutput, mem, &info.mBufferID);
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Dequeues one buffer from the native window and hands it to the component
// via fillBuffer(). No-op (OK) once the initial quota has been submitted.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());
    if (mMetadataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetadataBuffersToSubmit;
    info->checkWriteFence("submitOutputMetadataBuffer");
    return fillBuffer(info);
}

// Blocks on the fence |fd| (if valid) up to IOMX::kFenceTimeoutMs. |dbg| is
// only used to label the warning on timeout. The Fence object takes
// ownership of |fd|.
status_t ACodec::waitForFence(int fd, const char *dbg ) {
    status_t res = OK;
    if (fd >= 0) {
        sp<Fence> fence = new Fence(fd);
        res = fence->wait(IOMX::kFenceTimeoutMs);
        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    }
    return res;
}

// static
// Human-readable name for a BufferInfo ownership state (for logs).
const char *ACodec::_asString(BufferInfo::Status s) {
    switch (s) {
        case BufferInfo::OWNED_BY_US: return "OUR";
        case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
        case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
        case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
        case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
        default: return "?";
    }
}

// Logs every buffer tracked on |portIndex| with its id, graphic buffer
// pointers, ownership state and dequeue generation. Debug aid only.
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                i, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
    }
}

// Returns a buffer we own back to the native window (cancelBuffer), passing
// along its write fence. Ownership is transferred to the window even if the
// cancel call itself fails.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Registers a freshly dequeued buffer with the render tracker (keyed by its
// slot index within mBuffers) and flushes any frames whose fences have
// already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Feeds a frame-rendered event into the tracker; dumps the queue if the
// tracker cannot match it.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}

// Collects frames whose render fences have signaled (up to |until|, if set),
// detaches their tracking info from mBuffers, and posts an
// kWhatOutputFramesRendered notification when there is anything to report.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}

// Dequeues a buffer from the native window and maps it back to the
// BufferInfo that tracks it. Known-but-stale and unregistered buffers are
// discarded and the dequeue is retried; in metadata mode an unknown buffer
// replaces the least-recently-dequeued window-owned slot instead.
// Returns NULL on failure (tunneled mode, fatal error, or dequeue error).
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
                " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        // Search (newest first) for the BufferInfo whose gralloc handle
        // matches the dequeued buffer.
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                    info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used.  We can simply ignore this as we have already dequeued
                // this buffer properly.  NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    if (storingMetadataInDecodedBuffers() && mLegacyAdaptiveExperiment) {
        // If we're here while running legacy experiment, we dequeued some
        // unrecognized buffers, and the experiment can't continue.
ALOGE("Legacy experiment failed, drop back to metadata mode");
        mLegacyAdaptiveExperiment = false;
    }
    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    // Verbose-log the replacement, reading back the reference the component
    // last saw from the slot's metadata buffer for comparison.
    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mCodecData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mCodecData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mCodecData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mCodecData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer on |portIndex| (in reverse order, since freeBuffer
// removes entries) and releases the port's memory dealer. Returns the first
// error encountered, but always processes all buffers.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}

// Frees output buffers that are not currently held by the component or a
// downstream consumer. Returns the first error encountered.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t err = OK;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            status_t err2 = freeBuffer(kPortIndexOutput, i);
            if (err == OK) {
                err = err2;
            }
        }
    }

    return err;
}

// Releases buffer slot |i| on |portIndex|: cancels it back to the native
// window if we own it, frees it from the OMX component, closes any pending
// fence fd and drops its render-tracking info. The slot is removed from
// mBuffers even if the component-side free fails.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mCodecData != NULL
            && info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mCodecData->base())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMXNode->freeBuffer(portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMXNode->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}

// Linear search for the BufferInfo with |bufferID| on |portIndex|; optionally
// reports its slot index through |index|. Returns NULL (and logs) if absent.
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    ALOGE("Could not find buffer with ID %u", bufferID);
    return NULL;
}

// Hands an output buffer to the component to be filled. In non-legacy
// metadata mode the graphic buffer itself is passed; otherwise the preset
// (fixed) buffer contents are used. Fence ownership transfers to the call.
status_t ACodec::fillBuffer(BufferInfo *info) {
    status_t err;
    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
        err = mOMXNode->fillBuffer(
            info->mBufferID, OMXBuffer::sPreset, info->mFenceFd);
    } else {
        err = mOMXNode->fillBuffer(
            info->mBufferID, info->mGraphicBuffer, info->mFenceFd);
    }

    info->mFenceFd = -1;
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }
    return err;
}

// Maps (isEncoder, mime) to the standard OMX component role and applies it
// to the node. Fails with BAD_VALUE for unknown mime types; a component that
// rejects the role only logs a warning and returns the component's error.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    const char *role = GetComponentRole(isEncoder, mime);
    if (role == NULL) {
        return BAD_VALUE;
    }
    status_t err = SetComponentRole(mOMXNode, role);
    if (err != OK) {
        ALOGW("[%s] Failed to set standard component role '%s'.",
             mComponentName.c_str(), role);
    }
    return err;
}

status_t ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    int32_t encoder;
    if (!msg->findInt32("encoder", &encoder)) {
        encoder = false;
    }

    sp<AMessage> inputFormat = new AMessage;
    sp<AMessage> outputFormat = new AMessage;
    mConfigFormat = msg;

    mIsEncoder = encoder;

    mInputMetadataType = kMetadataBufferTypeInvalid;
    mOutputMetadataType = kMetadataBufferTypeInvalid;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);

    if (err != OK) {
        return err;
    }

    int32_t bitRate = 0;
    // FLAC encoder doesn't need a bitrate, other encoders do
    if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
            && !msg->findInt32("bitrate", &bitRate)) {
        return INVALID_OPERATION;
    }

    // propagate bitrate to the output so that the muxer has it
    if (encoder && msg->findInt32("bitrate", &bitRate)) {
        // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
        // average bitrate. We've been setting both bitrate and max-bitrate to this same value.
1634 outputFormat->setInt32("bitrate", bitRate); 1635 outputFormat->setInt32("max-bitrate", bitRate); 1636 } 1637 1638 int32_t storeMeta; 1639 if (encoder 1640 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1641 && storeMeta != kMetadataBufferTypeInvalid) { 1642 mInputMetadataType = (MetadataBufferType)storeMeta; 1643 err = mOMXNode->storeMetaDataInBuffers( 1644 kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1645 if (err != OK) { 1646 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1647 mComponentName.c_str(), err); 1648 1649 return err; 1650 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1651 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1652 // IOMX translates ANWBuffers to gralloc source already. 1653 mInputMetadataType = (MetadataBufferType)storeMeta; 1654 } 1655 1656 uint32_t usageBits; 1657 if (mOMXNode->getParameter( 1658 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1659 &usageBits, sizeof(usageBits)) == OK) { 1660 inputFormat->setInt32( 1661 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1662 } 1663 } 1664 1665 int32_t prependSPSPPS = 0; 1666 if (encoder 1667 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1668 && prependSPSPPS != 0) { 1669 OMX_INDEXTYPE index; 1670 err = mOMXNode->getExtensionIndex( 1671 "OMX.google.android.index.prependSPSPPSToIDRFrames", &index); 1672 1673 if (err == OK) { 1674 PrependSPSPPSToIDRFramesParams params; 1675 InitOMXParams(¶ms); 1676 params.bEnable = OMX_TRUE; 1677 1678 err = mOMXNode->setParameter(index, ¶ms, sizeof(params)); 1679 } 1680 1681 if (err != OK) { 1682 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1683 "IDR frames. (err %d)", err); 1684 1685 return err; 1686 } 1687 } 1688 1689 // Only enable metadata mode on encoder output if encoder can prepend 1690 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1691 // opaque handle, to which we don't have access. 
1692 int32_t video = !strncasecmp(mime, "video/", 6); 1693 mIsVideo = video; 1694 if (encoder && video) { 1695 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1696 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1697 && storeMeta != 0); 1698 1699 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1700 err = mOMXNode->storeMetaDataInBuffers(kPortIndexOutput, enable, &mOutputMetadataType); 1701 if (err != OK) { 1702 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1703 mComponentName.c_str(), err); 1704 } 1705 1706 if (!msg->findInt64( 1707 "repeat-previous-frame-after", 1708 &mRepeatFrameDelayUs)) { 1709 mRepeatFrameDelayUs = -1ll; 1710 } 1711 1712 // only allow 32-bit value, since we pass it as U32 to OMX. 1713 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1714 mMaxPtsGapUs = -1ll; 1715 } else if (mMaxPtsGapUs > INT32_MAX || mMaxPtsGapUs < 0) { 1716 ALOGW("Unsupported value for max pts gap %lld", (long long) mMaxPtsGapUs); 1717 mMaxPtsGapUs = -1ll; 1718 } 1719 1720 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1721 mMaxFps = -1; 1722 } 1723 1724 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1725 mTimePerCaptureUs = -1ll; 1726 } 1727 1728 if (!msg->findInt32( 1729 "create-input-buffers-suspended", 1730 (int32_t*)&mCreateInputBuffersSuspended)) { 1731 mCreateInputBuffersSuspended = false; 1732 } 1733 } 1734 1735 // NOTE: we only use native window for video decoders 1736 sp<RefBase> obj; 1737 bool haveNativeWindow = msg->findObject("native-window", &obj) 1738 && obj != NULL && video && !encoder; 1739 mUsingNativeWindow = haveNativeWindow; 1740 mLegacyAdaptiveExperiment = false; 1741 if (video && !encoder) { 1742 inputFormat->setInt32("adaptive-playback", false); 1743 1744 int32_t usageProtected; 1745 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1746 if (!haveNativeWindow) { 1747 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1748 return 
PERMISSION_DENIED; 1749 } 1750 mFlags |= kFlagIsGrallocUsageProtected; 1751 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1752 } 1753 1754 if (mFlags & kFlagIsSecure) { 1755 // use native_handles for secure input buffers 1756 err = mOMXNode->enableNativeBuffers( 1757 kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1758 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1759 err = OK; // ignore error for now 1760 } 1761 } 1762 if (haveNativeWindow) { 1763 sp<ANativeWindow> nativeWindow = 1764 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1765 1766 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1767 int32_t autoFrc; 1768 if (msg->findInt32("auto-frc", &autoFrc)) { 1769 bool enabled = autoFrc; 1770 OMX_CONFIG_BOOLEANTYPE config; 1771 InitOMXParams(&config); 1772 config.bEnabled = (OMX_BOOL)enabled; 1773 status_t temp = mOMXNode->setConfig( 1774 (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1775 &config, sizeof(config)); 1776 if (temp == OK) { 1777 outputFormat->setInt32("auto-frc", enabled); 1778 } else if (enabled) { 1779 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1780 } 1781 } 1782 // END of temporary support for automatic FRC 1783 1784 int32_t tunneled; 1785 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1786 tunneled != 0) { 1787 ALOGI("Configuring TUNNELED video playback."); 1788 mTunneled = true; 1789 1790 int32_t audioHwSync = 0; 1791 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1792 ALOGW("No Audio HW Sync provided for video tunnel"); 1793 } 1794 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1795 if (err != OK) { 1796 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1797 audioHwSync, nativeWindow.get()); 1798 return err; 1799 } 1800 1801 int32_t maxWidth = 0, maxHeight = 0; 1802 if (msg->findInt32("max-width", &maxWidth) && 1803 msg->findInt32("max-height", &maxHeight)) { 1804 1805 err = 
mOMXNode->prepareForAdaptivePlayback( 1806 kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1807 if (err != OK) { 1808 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1809 mComponentName.c_str(), err); 1810 // allow failure 1811 err = OK; 1812 } else { 1813 inputFormat->setInt32("max-width", maxWidth); 1814 inputFormat->setInt32("max-height", maxHeight); 1815 inputFormat->setInt32("adaptive-playback", true); 1816 } 1817 } 1818 } else { 1819 ALOGV("Configuring CPU controlled video playback."); 1820 mTunneled = false; 1821 1822 // Explicity reset the sideband handle of the window for 1823 // non-tunneled video in case the window was previously used 1824 // for a tunneled video playback. 1825 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1826 if (err != OK) { 1827 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1828 return err; 1829 } 1830 1831 // Always try to enable dynamic output buffers on native surface 1832 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1833 err = mOMXNode->storeMetaDataInBuffers( 1834 kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1835 if (err != OK) { 1836 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1837 mComponentName.c_str(), err); 1838 1839 // if adaptive playback has been requested, try JB fallback 1840 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1841 // LARGE MEMORY REQUIREMENT 1842 1843 // we will not do adaptive playback on software accessed 1844 // surfaces as they never had to respond to changes in the 1845 // crop window, and we don't trust that they will be able to. 
1846 int usageBits = 0; 1847 bool canDoAdaptivePlayback; 1848 1849 if (nativeWindow->query( 1850 nativeWindow.get(), 1851 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1852 &usageBits) != OK) { 1853 canDoAdaptivePlayback = false; 1854 } else { 1855 canDoAdaptivePlayback = 1856 (usageBits & 1857 (GRALLOC_USAGE_SW_READ_MASK | 1858 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1859 } 1860 1861 int32_t maxWidth = 0, maxHeight = 0; 1862 if (canDoAdaptivePlayback && 1863 msg->findInt32("max-width", &maxWidth) && 1864 msg->findInt32("max-height", &maxHeight)) { 1865 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1866 mComponentName.c_str(), maxWidth, maxHeight); 1867 1868 err = mOMXNode->prepareForAdaptivePlayback( 1869 kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1870 ALOGW_IF(err != OK, 1871 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1872 mComponentName.c_str(), err); 1873 1874 if (err == OK) { 1875 inputFormat->setInt32("max-width", maxWidth); 1876 inputFormat->setInt32("max-height", maxHeight); 1877 inputFormat->setInt32("adaptive-playback", true); 1878 } 1879 } 1880 // allow failure 1881 err = OK; 1882 } else { 1883 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1884 mComponentName.c_str()); 1885 CHECK(storingMetadataInDecodedBuffers()); 1886 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1887 "legacy-adaptive", !msg->contains("no-experiments")); 1888 1889 inputFormat->setInt32("adaptive-playback", true); 1890 } 1891 1892 int32_t push; 1893 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1894 && push != 0) { 1895 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1896 } 1897 } 1898 1899 int32_t rotationDegrees; 1900 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1901 mRotationDegrees = rotationDegrees; 1902 } else { 1903 mRotationDegrees = 0; 1904 } 1905 } 1906 1907 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 1908 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 1909 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 1910 1911 if (video) { 1912 // determine need for software renderer 1913 bool usingSwRenderer = false; 1914 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1915 usingSwRenderer = true; 1916 haveNativeWindow = false; 1917 } 1918 1919 if (encoder) { 1920 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 1921 } else { 1922 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 1923 } 1924 1925 if (err != OK) { 1926 return err; 1927 } 1928 1929 if (haveNativeWindow) { 1930 mNativeWindow = static_cast<Surface *>(obj.get()); 1931 } 1932 1933 // initialize native window now to get actual output format 1934 // TODO: this is needed for some encoders even though they don't use native window 1935 err = initNativeWindow(); 1936 if (err != OK) { 1937 return err; 1938 } 1939 1940 // fallback for devices that do not handle flex-YUV for native buffers 1941 if (haveNativeWindow) { 1942 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 1943 if (msg->findInt32("color-format", &requestedColorFormat) && 1944 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 1945 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 1946 if (err != OK) { 1947 return err; 1948 } 1949 int32_t colorFormat = OMX_COLOR_FormatUnused; 1950 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 1951 if (!outputFormat->findInt32("color-format", &colorFormat)) { 1952 ALOGE("ouptut port did not have a color format (wrong domain?)"); 1953 return BAD_VALUE; 1954 } 1955 ALOGD("[%s] Requested output format %#x and got %#x.", 1956 mComponentName.c_str(), requestedColorFormat, colorFormat); 1957 if (!IsFlexibleColorFormat( 1958 mOMXNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 1959 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 1960 // device did not handle flex-YUV request for native window, fall back 1961 // to SW renderer 1962 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 1963 mNativeWindow.clear(); 1964 mNativeWindowUsageBits = 0; 1965 haveNativeWindow = false; 1966 usingSwRenderer = true; 1967 if (storingMetadataInDecodedBuffers()) { 1968 err = mOMXNode->storeMetaDataInBuffers( 1969 kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 1970 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 1971 // TODO: implement adaptive-playback support for bytebuffer mode. 1972 // This is done by SW codecs, but most HW codecs don't support it. 1973 inputFormat->setInt32("adaptive-playback", false); 1974 } 1975 if (err == OK) { 1976 err = mOMXNode->enableNativeBuffers( 1977 kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 1978 } 1979 if (mFlags & kFlagIsGrallocUsageProtected) { 1980 // fallback is not supported for protected playback 1981 err = PERMISSION_DENIED; 1982 } else if (err == OK) { 1983 err = setupVideoDecoder( 1984 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 1985 } 1986 } 1987 } 1988 } 1989 1990 if (usingSwRenderer) { 1991 outputFormat->setInt32("using-sw-renderer", 1); 1992 } 1993 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 1994 int32_t numChannels, sampleRate; 1995 if (!msg->findInt32("channel-count", &numChannels) 1996 || !msg->findInt32("sample-rate", &sampleRate)) { 1997 // Since we did not always check for these, leave them optional 1998 // and have the decoder figure it all out. 1999 err = OK; 2000 } else { 2001 err = setupRawAudioFormat( 2002 encoder ? 
kPortIndexInput : kPortIndexOutput, 2003 sampleRate, 2004 numChannels); 2005 } 2006 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2007 int32_t numChannels, sampleRate; 2008 if (!msg->findInt32("channel-count", &numChannels) 2009 || !msg->findInt32("sample-rate", &sampleRate)) { 2010 err = INVALID_OPERATION; 2011 } else { 2012 int32_t isADTS, aacProfile; 2013 int32_t sbrMode; 2014 int32_t maxOutputChannelCount; 2015 int32_t pcmLimiterEnable; 2016 drcParams_t drc; 2017 if (!msg->findInt32("is-adts", &isADTS)) { 2018 isADTS = 0; 2019 } 2020 if (!msg->findInt32("aac-profile", &aacProfile)) { 2021 aacProfile = OMX_AUDIO_AACObjectNull; 2022 } 2023 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2024 sbrMode = -1; 2025 } 2026 2027 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2028 maxOutputChannelCount = -1; 2029 } 2030 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2031 // value is unknown 2032 pcmLimiterEnable = -1; 2033 } 2034 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2035 // value is unknown 2036 drc.encodedTargetLevel = -1; 2037 } 2038 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2039 // value is unknown 2040 drc.drcCut = -1; 2041 } 2042 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2043 // value is unknown 2044 drc.drcBoost = -1; 2045 } 2046 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2047 // value is unknown 2048 drc.heavyCompression = -1; 2049 } 2050 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2051 // value is unknown 2052 drc.targetRefLevel = -1; 2053 } 2054 2055 err = setupAACCodec( 2056 encoder, numChannels, sampleRate, bitRate, aacProfile, 2057 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2058 pcmLimiterEnable); 2059 } 2060 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2061 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2062 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2063 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2064 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2065 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2066 // These are PCM-like formats with a fixed sample rate but 2067 // a variable number of channels. 2068 2069 int32_t numChannels; 2070 if (!msg->findInt32("channel-count", &numChannels)) { 2071 err = INVALID_OPERATION; 2072 } else { 2073 int32_t sampleRate; 2074 if (!msg->findInt32("sample-rate", &sampleRate)) { 2075 sampleRate = 8000; 2076 } 2077 err = setupG711Codec(encoder, sampleRate, numChannels); 2078 } 2079 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2080 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2081 if (encoder && 2082 (!msg->findInt32("channel-count", &numChannels) 2083 || !msg->findInt32("sample-rate", &sampleRate))) { 2084 ALOGE("missing channel count or sample rate for FLAC encoder"); 2085 err = INVALID_OPERATION; 2086 } else { 2087 if (encoder) { 2088 if (!msg->findInt32( 2089 "complexity", &compressionLevel) && 2090 !msg->findInt32( 2091 "flac-compression-level", &compressionLevel)) { 2092 compressionLevel = 5; // default FLAC compression level 2093 } else if (compressionLevel < 0) { 2094 ALOGW("compression level %d outside [0..8] range, " 2095 "using 0", 2096 compressionLevel); 2097 compressionLevel = 0; 2098 } else if (compressionLevel > 8) { 2099 ALOGW("compression level %d outside [0..8] range, " 2100 "using 8", 2101 compressionLevel); 2102 compressionLevel = 8; 2103 } 2104 } 2105 err = setupFlacCodec( 2106 encoder, numChannels, sampleRate, compressionLevel); 2107 } 2108 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2109 int32_t numChannels, sampleRate; 2110 if (encoder 2111 || !msg->findInt32("channel-count", &numChannels) 2112 || !msg->findInt32("sample-rate", &sampleRate)) { 2113 err = INVALID_OPERATION; 2114 } else { 2115 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2116 } 2117 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2118 int32_t numChannels; 2119 int32_t sampleRate; 2120 if (!msg->findInt32("channel-count", &numChannels) 2121 || !msg->findInt32("sample-rate", &sampleRate)) { 2122 err = INVALID_OPERATION; 2123 } else { 2124 err = setupAC3Codec(encoder, numChannels, sampleRate); 2125 } 2126 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2127 int32_t numChannels; 2128 int32_t sampleRate; 2129 if (!msg->findInt32("channel-count", &numChannels) 2130 || !msg->findInt32("sample-rate", &sampleRate)) { 2131 err = INVALID_OPERATION; 2132 } else { 2133 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2134 } 2135 } 2136 2137 if (err != OK) { 2138 return err; 2139 } 2140 2141 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2142 mEncoderDelay = 0; 2143 } 2144 2145 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2146 mEncoderPadding = 0; 2147 } 2148 2149 if (msg->findInt32("channel-mask", &mChannelMask)) { 2150 mChannelMaskPresent = true; 2151 } else { 2152 mChannelMaskPresent = false; 2153 } 2154 2155 int32_t maxInputSize; 2156 if (msg->findInt32("max-input-size", &maxInputSize)) { 2157 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2158 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2159 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2160 } 2161 2162 int32_t priority; 2163 if (msg->findInt32("priority", &priority)) { 2164 err = setPriority(priority); 2165 } 2166 2167 int32_t rateInt = -1; 2168 float rateFloat = -1; 2169 if (!msg->findFloat("operating-rate", &rateFloat)) { 2170 msg->findInt32("operating-rate", &rateInt); 2171 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
2172 } 2173 if (rateFloat > 0) { 2174 err = setOperatingRate(rateFloat, video); 2175 } 2176 2177 // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame. 2178 mBaseOutputFormat = outputFormat; 2179 mLastOutputFormat.clear(); 2180 2181 err = getPortFormat(kPortIndexInput, inputFormat); 2182 if (err == OK) { 2183 err = getPortFormat(kPortIndexOutput, outputFormat); 2184 if (err == OK) { 2185 mInputFormat = inputFormat; 2186 mOutputFormat = outputFormat; 2187 } 2188 } 2189 2190 // create data converters if needed 2191 if (!video && err == OK) { 2192 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 2193 if (encoder) { 2194 (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2195 mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding); 2196 if (mConverter[kPortIndexInput] != NULL) { 2197 mInputFormat->setInt32("pcm-encoding", pcmEncoding); 2198 } 2199 } else { 2200 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2201 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 2202 if (mConverter[kPortIndexOutput] != NULL) { 2203 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 2204 } 2205 } 2206 } 2207 2208 return err; 2209} 2210 2211status_t ACodec::setPriority(int32_t priority) { 2212 if (priority < 0) { 2213 return BAD_VALUE; 2214 } 2215 OMX_PARAM_U32TYPE config; 2216 InitOMXParams(&config); 2217 config.nU32 = (OMX_U32)priority; 2218 status_t temp = mOMXNode->setConfig( 2219 (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2220 &config, sizeof(config)); 2221 if (temp != OK) { 2222 ALOGI("codec does not support config priority (err %d)", temp); 2223 } 2224 return OK; 2225} 2226 2227status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2228 if (rateFloat < 0) { 2229 return BAD_VALUE; 2230 } 2231 OMX_U32 rate; 2232 if (isVideo) { 2233 if (rateFloat > 65535) { 2234 return BAD_VALUE; 2235 } 2236 rate = 
(OMX_U32)(rateFloat * 65536.0f + 0.5f); 2237 } else { 2238 if (rateFloat > UINT_MAX) { 2239 return BAD_VALUE; 2240 } 2241 rate = (OMX_U32)(rateFloat); 2242 } 2243 OMX_PARAM_U32TYPE config; 2244 InitOMXParams(&config); 2245 config.nU32 = rate; 2246 status_t err = mOMXNode->setConfig( 2247 (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, 2248 &config, sizeof(config)); 2249 if (err != OK) { 2250 ALOGI("codec does not support config operating rate (err %d)", err); 2251 } 2252 return OK; 2253} 2254 2255status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) { 2256 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2257 InitOMXParams(¶ms); 2258 params.nPortIndex = kPortIndexOutput; 2259 status_t err = mOMXNode->getConfig( 2260 (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2261 if (err == OK) { 2262 *intraRefreshPeriod = params.nRefreshPeriod; 2263 return OK; 2264 } 2265 2266 // Fallback to query through standard OMX index. 2267 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2268 InitOMXParams(&refreshParams); 2269 refreshParams.nPortIndex = kPortIndexOutput; 2270 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2271 err = mOMXNode->getParameter( 2272 OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams)); 2273 if (err != OK || refreshParams.nCirMBs == 0) { 2274 *intraRefreshPeriod = 0; 2275 return OK; 2276 } 2277 2278 // Calculate period based on width and height 2279 uint32_t width, height; 2280 OMX_PARAM_PORTDEFINITIONTYPE def; 2281 InitOMXParams(&def); 2282 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2283 def.nPortIndex = kPortIndexOutput; 2284 err = mOMXNode->getParameter( 2285 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2286 if (err != OK) { 2287 *intraRefreshPeriod = 0; 2288 return err; 2289 } 2290 width = video_def->nFrameWidth; 2291 height = video_def->nFrameHeight; 2292 // Use H.264/AVC MacroBlock size 16x16 2293 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 
16u)), refreshParams.nCirMBs); 2294 2295 return OK; 2296} 2297 2298status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) { 2299 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2300 InitOMXParams(¶ms); 2301 params.nPortIndex = kPortIndexOutput; 2302 params.nRefreshPeriod = intraRefreshPeriod; 2303 status_t err = mOMXNode->setConfig( 2304 (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2305 if (err == OK) { 2306 return OK; 2307 } 2308 2309 // Only in configure state, a component could invoke setParameter. 2310 if (!inConfigure) { 2311 return INVALID_OPERATION; 2312 } else { 2313 ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str()); 2314 } 2315 2316 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2317 InitOMXParams(&refreshParams); 2318 refreshParams.nPortIndex = kPortIndexOutput; 2319 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2320 2321 if (intraRefreshPeriod == 0) { 2322 // 0 means disable intra refresh. 
2323 refreshParams.nCirMBs = 0; 2324 } else { 2325 // Calculate macroblocks that need to be intra coded base on width and height 2326 uint32_t width, height; 2327 OMX_PARAM_PORTDEFINITIONTYPE def; 2328 InitOMXParams(&def); 2329 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2330 def.nPortIndex = kPortIndexOutput; 2331 err = mOMXNode->getParameter( 2332 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2333 if (err != OK) { 2334 return err; 2335 } 2336 width = video_def->nFrameWidth; 2337 height = video_def->nFrameHeight; 2338 // Use H.264/AVC MacroBlock size 16x16 2339 refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod); 2340 } 2341 2342 err = mOMXNode->setParameter( 2343 OMX_IndexParamVideoIntraRefresh, 2344 &refreshParams, sizeof(refreshParams)); 2345 if (err != OK) { 2346 return err; 2347 } 2348 2349 return OK; 2350} 2351 2352status_t ACodec::configureTemporalLayers( 2353 const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) { 2354 if (!mIsVideo || !mIsEncoder) { 2355 return INVALID_OPERATION; 2356 } 2357 2358 AString tsSchema; 2359 if (!msg->findString("ts-schema", &tsSchema)) { 2360 return OK; 2361 } 2362 2363 unsigned int numLayers = 0; 2364 unsigned int numBLayers = 0; 2365 int tags; 2366 char dummy; 2367 OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern = 2368 OMX_VIDEO_AndroidTemporalLayeringPatternNone; 2369 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1 2370 && numLayers > 0) { 2371 pattern = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC; 2372 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c", 2373 &numLayers, &dummy, &numBLayers, &dummy)) 2374 && (tags == 1 || (tags == 3 && dummy == '+')) 2375 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 2376 numLayers += numBLayers; 2377 pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid; 2378 } else { 2379 ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 
2380 return BAD_VALUE; 2381 } 2382 2383 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams; 2384 InitOMXParams(&layerParams); 2385 layerParams.nPortIndex = kPortIndexOutput; 2386 2387 status_t err = mOMXNode->getParameter( 2388 (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2389 &layerParams, sizeof(layerParams)); 2390 2391 if (err != OK) { 2392 return err; 2393 } else if (!(layerParams.eSupportedPatterns & pattern)) { 2394 return BAD_VALUE; 2395 } 2396 2397 numLayers = min(numLayers, layerParams.nLayerCountMax); 2398 numBLayers = min(numBLayers, layerParams.nBLayerCountMax); 2399 2400 if (!inConfigure) { 2401 OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig; 2402 InitOMXParams(&layerConfig); 2403 layerConfig.nPortIndex = kPortIndexOutput; 2404 layerConfig.ePattern = pattern; 2405 layerConfig.nPLayerCountActual = numLayers - numBLayers; 2406 layerConfig.nBLayerCountActual = numBLayers; 2407 layerConfig.bBitrateRatiosSpecified = OMX_FALSE; 2408 2409 err = mOMXNode->setConfig( 2410 (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering, 2411 &layerConfig, sizeof(layerConfig)); 2412 } else { 2413 layerParams.ePattern = pattern; 2414 layerParams.nPLayerCountActual = numLayers - numBLayers; 2415 layerParams.nBLayerCountActual = numBLayers; 2416 layerParams.bBitrateRatiosSpecified = OMX_FALSE; 2417 2418 err = mOMXNode->setParameter( 2419 (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2420 &layerParams, sizeof(layerParams)); 2421 } 2422 2423 AString configSchema; 2424 if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) { 2425 configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers); 2426 } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) { 2427 configSchema = AStringPrintf("webrtc.vp8.%u", numLayers); 2428 } 2429 2430 if (err != OK) { 2431 ALOGW("Failed to set temporal layers to %s (requested %s)", 2432 configSchema.c_str(), tsSchema.c_str()); 2433 return err; 2434 } 
2435 2436 err = mOMXNode->getParameter( 2437 (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2438 &layerParams, sizeof(layerParams)); 2439 2440 if (err == OK) { 2441 ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)", 2442 tsSchema.c_str(), configSchema.c_str(), 2443 asString(layerParams.ePattern), layerParams.ePattern, 2444 layerParams.nPLayerCountActual, layerParams.nBLayerCountActual); 2445 2446 if (outputFormat.get() == mOutputFormat.get()) { 2447 mOutputFormat = mOutputFormat->dup(); // trigger an output format change event 2448 } 2449 // assume we got what we configured 2450 outputFormat->setString("ts-schema", configSchema); 2451 } 2452 return err; 2453} 2454 2455status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { 2456 OMX_PARAM_PORTDEFINITIONTYPE def; 2457 InitOMXParams(&def); 2458 def.nPortIndex = portIndex; 2459 2460 status_t err = mOMXNode->getParameter( 2461 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2462 2463 if (err != OK) { 2464 return err; 2465 } 2466 2467 if (def.nBufferSize >= size) { 2468 return OK; 2469 } 2470 2471 def.nBufferSize = size; 2472 2473 err = mOMXNode->setParameter( 2474 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2475 2476 if (err != OK) { 2477 return err; 2478 } 2479 2480 err = mOMXNode->getParameter( 2481 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2482 2483 if (err != OK) { 2484 return err; 2485 } 2486 2487 if (def.nBufferSize < size) { 2488 ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize); 2489 return FAILED_TRANSACTION; 2490 } 2491 2492 return OK; 2493} 2494 2495status_t ACodec::selectAudioPortFormat( 2496 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 2497 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 2498 InitOMXParams(&format); 2499 2500 format.nPortIndex = portIndex; 2501 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2502 format.nIndex = index; 2503 status_t err = mOMXNode->getParameter( 2504 
OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2505 2506 if (err != OK) { 2507 return err; 2508 } 2509 2510 if (format.eEncoding == desiredFormat) { 2511 break; 2512 } 2513 2514 if (index == kMaxIndicesToCheck) { 2515 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 2516 mComponentName.c_str(), index, 2517 asString(format.eEncoding), format.eEncoding); 2518 return ERROR_UNSUPPORTED; 2519 } 2520 } 2521 2522 return mOMXNode->setParameter( 2523 OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2524} 2525 2526status_t ACodec::setupAACCodec( 2527 bool encoder, int32_t numChannels, int32_t sampleRate, 2528 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 2529 int32_t maxOutputChannelCount, const drcParams_t& drc, 2530 int32_t pcmLimiterEnable) { 2531 if (encoder && isADTS) { 2532 return -EINVAL; 2533 } 2534 2535 status_t err = setupRawAudioFormat( 2536 encoder ? kPortIndexInput : kPortIndexOutput, 2537 sampleRate, 2538 numChannels); 2539 2540 if (err != OK) { 2541 return err; 2542 } 2543 2544 if (encoder) { 2545 err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC); 2546 2547 if (err != OK) { 2548 return err; 2549 } 2550 2551 OMX_PARAM_PORTDEFINITIONTYPE def; 2552 InitOMXParams(&def); 2553 def.nPortIndex = kPortIndexOutput; 2554 2555 err = mOMXNode->getParameter( 2556 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2557 2558 if (err != OK) { 2559 return err; 2560 } 2561 2562 def.format.audio.bFlagErrorConcealment = OMX_TRUE; 2563 def.format.audio.eEncoding = OMX_AUDIO_CodingAAC; 2564 2565 err = mOMXNode->setParameter( 2566 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2567 2568 if (err != OK) { 2569 return err; 2570 } 2571 2572 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2573 InitOMXParams(&profile); 2574 profile.nPortIndex = kPortIndexOutput; 2575 2576 err = mOMXNode->getParameter( 2577 OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2578 2579 if (err != OK) { 2580 return err; 2581 } 2582 2583 
profile.nChannels = numChannels; 2584 2585 profile.eChannelMode = 2586 (numChannels == 1) 2587 ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo; 2588 2589 profile.nSampleRate = sampleRate; 2590 profile.nBitRate = bitRate; 2591 profile.nAudioBandWidth = 0; 2592 profile.nFrameLength = 0; 2593 profile.nAACtools = OMX_AUDIO_AACToolAll; 2594 profile.nAACERtools = OMX_AUDIO_AACERNone; 2595 profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile; 2596 profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF; 2597 switch (sbrMode) { 2598 case 0: 2599 // disable sbr 2600 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2601 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2602 break; 2603 case 1: 2604 // enable single-rate sbr 2605 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2606 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2607 break; 2608 case 2: 2609 // enable dual-rate sbr 2610 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2611 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2612 break; 2613 case -1: 2614 // enable both modes -> the codec will decide which mode should be used 2615 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2616 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2617 break; 2618 default: 2619 // unsupported sbr mode 2620 return BAD_VALUE; 2621 } 2622 2623 2624 err = mOMXNode->setParameter( 2625 OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2626 2627 if (err != OK) { 2628 return err; 2629 } 2630 2631 return err; 2632 } 2633 2634 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2635 InitOMXParams(&profile); 2636 profile.nPortIndex = kPortIndexInput; 2637 2638 err = mOMXNode->getParameter( 2639 OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2640 2641 if (err != OK) { 2642 return err; 2643 } 2644 2645 profile.nChannels = numChannels; 2646 profile.nSampleRate = sampleRate; 2647 2648 profile.eAACStreamFormat = 2649 isADTS 2650 ? 
OMX_AUDIO_AACStreamFormatMP4ADTS 2651 : OMX_AUDIO_AACStreamFormatMP4FF; 2652 2653 OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation; 2654 InitOMXParams(&presentation); 2655 presentation.nMaxOutputChannels = maxOutputChannelCount; 2656 presentation.nDrcCut = drc.drcCut; 2657 presentation.nDrcBoost = drc.drcBoost; 2658 presentation.nHeavyCompression = drc.heavyCompression; 2659 presentation.nTargetReferenceLevel = drc.targetRefLevel; 2660 presentation.nEncodedTargetLevel = drc.encodedTargetLevel; 2661 presentation.nPCMLimiterEnable = pcmLimiterEnable; 2662 2663 status_t res = mOMXNode->setParameter( 2664 OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2665 if (res == OK) { 2666 // optional parameters, will not cause configuration failure 2667 mOMXNode->setParameter( 2668 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation, 2669 &presentation, sizeof(presentation)); 2670 } else { 2671 ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res); 2672 } 2673 mSampleRate = sampleRate; 2674 return res; 2675} 2676 2677status_t ACodec::setupAC3Codec( 2678 bool encoder, int32_t numChannels, int32_t sampleRate) { 2679 status_t err = setupRawAudioFormat( 2680 encoder ? 
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2681 2682 if (err != OK) { 2683 return err; 2684 } 2685 2686 if (encoder) { 2687 ALOGW("AC3 encoding is not supported."); 2688 return INVALID_OPERATION; 2689 } 2690 2691 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2692 InitOMXParams(&def); 2693 def.nPortIndex = kPortIndexInput; 2694 2695 err = mOMXNode->getParameter( 2696 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, &def, sizeof(def)); 2697 2698 if (err != OK) { 2699 return err; 2700 } 2701 2702 def.nChannels = numChannels; 2703 def.nSampleRate = sampleRate; 2704 2705 return mOMXNode->setParameter( 2706 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, &def, sizeof(def)); 2707} 2708 2709status_t ACodec::setupEAC3Codec( 2710 bool encoder, int32_t numChannels, int32_t sampleRate) { 2711 status_t err = setupRawAudioFormat( 2712 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2713 2714 if (err != OK) { 2715 return err; 2716 } 2717 2718 if (encoder) { 2719 ALOGW("EAC3 encoding is not supported."); 2720 return INVALID_OPERATION; 2721 } 2722 2723 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2724 InitOMXParams(&def); 2725 def.nPortIndex = kPortIndexInput; 2726 2727 err = mOMXNode->getParameter( 2728 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, &def, sizeof(def)); 2729 2730 if (err != OK) { 2731 return err; 2732 } 2733 2734 def.nChannels = numChannels; 2735 def.nSampleRate = sampleRate; 2736 2737 return mOMXNode->setParameter( 2738 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, &def, sizeof(def)); 2739} 2740 2741static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2742 bool isAMRWB, int32_t bps) { 2743 if (isAMRWB) { 2744 if (bps <= 6600) { 2745 return OMX_AUDIO_AMRBandModeWB0; 2746 } else if (bps <= 8850) { 2747 return OMX_AUDIO_AMRBandModeWB1; 2748 } else if (bps <= 12650) { 2749 return OMX_AUDIO_AMRBandModeWB2; 2750 } else if (bps <= 14250) { 2751 return OMX_AUDIO_AMRBandModeWB3; 2752 } else if (bps <= 15850) { 2753 return OMX_AUDIO_AMRBandModeWB4; 
2754 } else if (bps <= 18250) { 2755 return OMX_AUDIO_AMRBandModeWB5; 2756 } else if (bps <= 19850) { 2757 return OMX_AUDIO_AMRBandModeWB6; 2758 } else if (bps <= 23050) { 2759 return OMX_AUDIO_AMRBandModeWB7; 2760 } 2761 2762 // 23850 bps 2763 return OMX_AUDIO_AMRBandModeWB8; 2764 } else { // AMRNB 2765 if (bps <= 4750) { 2766 return OMX_AUDIO_AMRBandModeNB0; 2767 } else if (bps <= 5150) { 2768 return OMX_AUDIO_AMRBandModeNB1; 2769 } else if (bps <= 5900) { 2770 return OMX_AUDIO_AMRBandModeNB2; 2771 } else if (bps <= 6700) { 2772 return OMX_AUDIO_AMRBandModeNB3; 2773 } else if (bps <= 7400) { 2774 return OMX_AUDIO_AMRBandModeNB4; 2775 } else if (bps <= 7950) { 2776 return OMX_AUDIO_AMRBandModeNB5; 2777 } else if (bps <= 10200) { 2778 return OMX_AUDIO_AMRBandModeNB6; 2779 } 2780 2781 // 12200 bps 2782 return OMX_AUDIO_AMRBandModeNB7; 2783 } 2784} 2785 2786status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2787 OMX_AUDIO_PARAM_AMRTYPE def; 2788 InitOMXParams(&def); 2789 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2790 2791 status_t err = mOMXNode->getParameter( 2792 OMX_IndexParamAudioAmr, &def, sizeof(def)); 2793 2794 if (err != OK) { 2795 return err; 2796 } 2797 2798 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2799 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2800 2801 err = mOMXNode->setParameter( 2802 OMX_IndexParamAudioAmr, &def, sizeof(def)); 2803 2804 if (err != OK) { 2805 return err; 2806 } 2807 2808 return setupRawAudioFormat( 2809 encoder ? kPortIndexInput : kPortIndexOutput, 2810 isWAMR ? 
16000 : 8000 /* sampleRate */, 2811 1 /* numChannels */); 2812} 2813 2814status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2815 if (encoder) { 2816 return INVALID_OPERATION; 2817 } 2818 2819 return setupRawAudioFormat( 2820 kPortIndexInput, sampleRate, numChannels); 2821} 2822 2823status_t ACodec::setupFlacCodec( 2824 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2825 2826 if (encoder) { 2827 OMX_AUDIO_PARAM_FLACTYPE def; 2828 InitOMXParams(&def); 2829 def.nPortIndex = kPortIndexOutput; 2830 2831 // configure compression level 2832 status_t err = mOMXNode->getParameter(OMX_IndexParamAudioFlac, &def, sizeof(def)); 2833 if (err != OK) { 2834 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2835 return err; 2836 } 2837 def.nCompressionLevel = compressionLevel; 2838 err = mOMXNode->setParameter(OMX_IndexParamAudioFlac, &def, sizeof(def)); 2839 if (err != OK) { 2840 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2841 return err; 2842 } 2843 } 2844 2845 return setupRawAudioFormat( 2846 encoder ? 
kPortIndexInput : kPortIndexOutput, 2847 sampleRate, 2848 numChannels); 2849} 2850 2851status_t ACodec::setupRawAudioFormat( 2852 OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) { 2853 OMX_PARAM_PORTDEFINITIONTYPE def; 2854 InitOMXParams(&def); 2855 def.nPortIndex = portIndex; 2856 2857 status_t err = mOMXNode->getParameter( 2858 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2859 2860 if (err != OK) { 2861 return err; 2862 } 2863 2864 def.format.audio.eEncoding = OMX_AUDIO_CodingPCM; 2865 2866 err = mOMXNode->setParameter( 2867 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2868 2869 if (err != OK) { 2870 return err; 2871 } 2872 2873 OMX_AUDIO_PARAM_PCMMODETYPE pcmParams; 2874 InitOMXParams(&pcmParams); 2875 pcmParams.nPortIndex = portIndex; 2876 2877 err = mOMXNode->getParameter( 2878 OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2879 2880 if (err != OK) { 2881 return err; 2882 } 2883 2884 pcmParams.nChannels = numChannels; 2885 switch (encoding) { 2886 case kAudioEncodingPcm8bit: 2887 pcmParams.eNumData = OMX_NumericalDataUnsigned; 2888 pcmParams.nBitPerSample = 8; 2889 break; 2890 case kAudioEncodingPcmFloat: 2891 pcmParams.eNumData = OMX_NumericalDataFloat; 2892 pcmParams.nBitPerSample = 32; 2893 break; 2894 case kAudioEncodingPcm16bit: 2895 pcmParams.eNumData = OMX_NumericalDataSigned; 2896 pcmParams.nBitPerSample = 16; 2897 break; 2898 default: 2899 return BAD_VALUE; 2900 } 2901 pcmParams.bInterleaved = OMX_TRUE; 2902 pcmParams.nSamplingRate = sampleRate; 2903 pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear; 2904 2905 if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) { 2906 return OMX_ErrorNone; 2907 } 2908 2909 err = mOMXNode->setParameter( 2910 OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2911 // if we could not set up raw format to non-16-bit, try with 16-bit 2912 // NOTE: we will also verify this via readback, in case codec ignores these fields 2913 if (err != OK && 
// --- tail of the PCM parameter-setup helper whose head precedes this chunk:
// if the caller asked for anything other than 16-bit PCM, fall back to
// signed 16-bit samples and push the amended parameters to the component.
            encoding != kAudioEncodingPcm16bit) {
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMXNode->setParameter(
                OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Switches the output port into tunneled (sideband) playback mode, associating
// it with the given audio HW sync id, and attaches the sideband stream handle
// produced by the component to |nativeWindow|.
// Returns the first error from the OMX node or the native window call.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMXNode->configureVideoTunnelMode(
            kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Enumerates the component's supported formats on |portIndex| (at most
// kMaxIndicesToCheck + 1 entries) looking for the requested
// compression/color format pair, and selects the matching entry via
// setParameter. When |compressionFormat| is Unused (i.e. an uncompressed
// port) and the caller asked for a flexible color format, the codec's
// concrete equivalent is substituted for the flexible one.
// Returns UNKNOWN_ERROR if no entry matches; otherwise the OMX error/OK.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMXNode->getParameter(
                OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            // end of the component's format list (or a real failure)
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && IsFlexibleColorFormat(
                        mOMXNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        // Workaround: the TI encoder reports a meaningful value in only one of
        // the two fields depending on the port, so match on that field alone.
        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
                && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            // give up rather than loop forever on a misbehaving component
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    // |format| still holds the matching entry from the loop above.
    status_t err = mOMXNode->setParameter(
            OMX_IndexParamVideoPortFormat, &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
//
// Suggested preference order:
// - optimal format for texture rendering (mediaplayer behavior)
// - optimal SW readable & texture renderable format (flex-YUV support)
// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
// - legacy "usable" standard formats
//
// For legacy support, we prefer a standard format, but will settle for a SW readable
// flex-YUV format.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMXNode->getParameter(
                OMX_IndexParamVideoPortFormat, &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            // the output port of a decoder must list uncompressed formats
            return OMX_ErrorBadParameter;
        }
        if (!getLegacyFlexibleFormat) {
            // non-legacy mode: take the component's first (most preferred) format
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && IsFlexibleColorFormat(
                        mOMXNode,
                        format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMXNode->setParameter(
            OMX_IndexParamVideoPortFormat, &format, sizeof(format));
}

// Mapping between video MIME types and OMX coding enums; used by the two
// lookup helpers that follow.
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
    { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision },
};

// Case-insensitive MIME -> OMX coding type lookup. On failure, sets
// *codingType to OMX_VIDEO_CodingUnused and returns ERROR_UNSUPPORTED.
static status_t GetVideoCodingTypeFromMime(
        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
            return OK;
        }
    }

    *codingType = OMX_VIDEO_CodingUnused;

    return ERROR_UNSUPPORTED;
}

// Reverse lookup: OMX coding type -> MIME. On failure, clears |mime| and
// returns ERROR_UNSUPPORTED.
static status_t GetMimeTypeForVideoCoding(
        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
            *mime = kVideoCodingMapEntry[i].mMime;
            return OK;
        }
    }

    mime->clear();

    return ERROR_UNSUPPORTED;
}

// Requests |bufferNum| actual buffers on the given port via the port
// definition. A setParameter failure is only logged: the component keeps its
// previous buffer count and this function still returns OK (deliberately
// best-effort); only a getParameter failure is propagated.
status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;
    status_t err;
    ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }
    def.nBufferCountActual = bufferNum;
    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        // Component could reject this request.
        ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(),
                portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    }
    return OK;
}

// Configures a video decoder from the |msg| format: input compression type,
// output color format, optional per-port buffer counts, frame rate, port
// dimensions, color aspects and HDR static info. Color-aspect and HDR
// support are optional, so ERROR_UNSUPPORTED from those steps is swallowed.
status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
        bool usingSwRenderer, sp<AMessage> &outputFormat) {
    int32_t width, height;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CODINGTYPE compressionFormat;
    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    if (compressionFormat == OMX_VIDEO_CodingVP9) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexInput;
        // Check if VP9 decoder advertises supported profiles.
        params.nProfileIndex = 0;
        // note: this |err| intentionally shadows the outer one — only the
        // legacy-decoder flag is derived from the query result
        status_t err = mOMXNode->getParameter(
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params, sizeof(params));
        mIsLegacyVP9Decoder = err != OK;
    }

    err = setVideoPortFormatType(
            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        return err;
    }

    int32_t tmp;
    if (msg->findInt32("color-format", &tmp)) {
        OMX_COLOR_FORMATTYPE colorFormat =
            static_cast<OMX_COLOR_FORMATTYPE>(tmp);
        err = setVideoPortFormatType(
                kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
        if (err != OK) {
            // requested format not supported; fall back to component preference
            ALOGW("[%s] does not support color format %d",
                  mComponentName.c_str(), colorFormat);
            err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
        }
    } else {
        err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
    }

    if (err != OK) {
        return err;
    }

    // Set the component input buffer number to be |tmp|. If succeed,
    // component will set input port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-input-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexInput, tmp);
        if (err != OK)
            return err;
    }

    // Set the component output buffer number to be |tmp|. If succeed,
    // component will set output port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-output-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexOutput, tmp);
        if (err != OK)
            return err;
    }

    int32_t frameRateInt;
    float frameRateFloat;
    if (!msg->findFloat("frame-rate", &frameRateFloat)) {
        if (!msg->findInt32("frame-rate", &frameRateInt)) {
            frameRateInt = -1;  // unknown frame rate
        }
        frameRateFloat = (float)frameRateInt;
    }

    err = setVideoFormatOnPort(
            kPortIndexInput, width, height, compressionFormat, frameRateFloat);

    if (err != OK) {
        return err;
    }

    err = setVideoFormatOnPort(
            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);

    if (err != OK) {
        return err;
    }

    err = setColorAspectsForVideoDecoder(
            width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }
    return err;
}

// Caches the vendor extension index for the describeColorAspects config;
// on failure mDescribeColorAspectsIndex is cleared to 0 so later calls know
// the codec does not support color aspects.
status_t ACodec::initDescribeColorAspectsIndex() {
    status_t err = mOMXNode->getExtensionIndex(
            "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
    if (err != OK) {
        mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes |params| color aspects to the codec (if the extension is supported)
// and, when |verify| is set, reads them back so the caller sees what the
// codec actually accepted. Returns ERROR_UNSUPPORTED if the extension index
// was never resolved.
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMXNode->setConfig(mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));

    if (verify && err == OK) {
        err = getCodecColorAspects(params);
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
            "[%s] setting color aspects failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Decoder path: derives output-port color aspects from the configure()
// format (defaulting them by video size when rendering to a native window),
// mirrors them into |outputFormat|, and communicates them to the codec.
status_t ACodec::setColorAspectsForVideoDecoder(
        int32_t width, int32_t height, bool usingNativeWindow,
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (usingNativeWindow) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
        // The default aspects will be set back to the output format during the
        // getFormat phase of configure(). Set non-Unspecified values back into the
        // format, in case component does not support this enumeration.
        setColorAspectsIntoFormat(params.sAspects, outputFormat);
    }

    (void)initDescribeColorAspectsIndex();

    // communicate color aspects to codec
    return setCodecColorAspects(params);
}

// Reads the codec's current color aspects into |params|. A failure is only
// warned about when the codec advertised support and this was a plain aspect
// query (not a dataspace request/change notification).
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMXNode->getConfig(mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}

// Copies the encoder's input-port color aspects into |format|, but only when
// the codec reported them successfully.
status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    status_t err = getCodecColorAspects(params);
    if (err == OK) {
        // we only set encoder input aspects if codec supports them
        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
    }
    return err;
}

// Resolves the dataspace for |params|: first asks the codec for guidance
// (when |tryCodec|), falling back to a legacy mapping derived from the color
// aspects. ERROR_UNSUPPORTED from the codec query is ignored.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
          "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}

// Decoder path used after configure: re-applies configured aspects, records
// the resulting aspects in |outputFormat|, and (optionally) resolves the
// dataspace the output surface should use.
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}

// initial video encoder setup for bytebuffer mode
// Encoder (bytebuffer-input) path: copies color config into the output
// format, derives input-port aspects from the configure() format, and pushes
// them to the codec while refusing to let the codec alter the platform
// aspects (at most two negotiation rounds).
status_t ACodec::setColorAspectsForVideoEncoder(
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    // copy config to output format as this is not exposed via getFormat
    copyColorConfig(configFormat, outputFormat);

    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    getColorAspectsFromFormat(configFormat, params.sAspects);

    (void)initDescribeColorAspectsIndex();

    int32_t usingRecorder;
    if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
        android_dataspace dataSpace = HAL_DATASPACE_BT709;
        int32_t width, height;
        if (configFormat->findInt32("width", &width)
                && configFormat->findInt32("height", &height)) {
            setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
            status_t err = getDataSpace(
                    params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
            if (err != OK) {
                return err;
            }
            setColorAspectsIntoFormat(params.sAspects, outputFormat);
        }
        inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
    }

    // communicate color aspects to codec, but do not allow change of the platform aspects
    ColorAspects origAspects = params.sAspects;
    for (int triesLeft = 2; --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK
                || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
                        params.sAspects, origAspects, true /* usePlatformAspects */)) {
            return err;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }
    return OK;
}

// Pushes HDR static metadata from the configure() format to the codec on
// |portIndex|, mirroring it into |outputFormat| when present.
status_t ACodec::setHDRStaticInfoForVideoCodec(
        OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    HDRStaticInfo *info = &params.sInfo;
    if (getHDRStaticInfoFromFormat(configFormat, info)) {
        setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
    }

    (void)initDescribeHDRStaticInfoIndex();

    // communicate HDR static Info to codec
    return setHDRStaticInfo(params);
}

// subsequent initial video encoder setup for surface mode
status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
        android_dataspace *dataSpace /* nonnull */) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    ColorAspects &aspects = params.sAspects;

    // reset default format and store resulting format into both input and output formats
    getColorAspectsFromFormat(mConfigFormat, aspects);
    int32_t width, height;
    if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) {
        setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
    }
    setColorAspectsIntoFormat(aspects, mInputFormat);
    setColorAspectsIntoFormat(aspects, mOutputFormat);

    // communicate color aspects to codec, but do not allow any change
    ColorAspects origAspects = aspects;
    status_t err = OK;
    for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) {
        // NOTE(review): this inner |err| shadows the outer one, so the outer
        // err remains OK even when setCodecColorAspects fails — confirm
        // whether this is intentional before changing it.
        status_t err = setCodecColorAspects(params, true /* readBack */);
        if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) {
            break;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }

    *dataSpace = HAL_DATASPACE_BT709;
    aspects = origAspects; // restore desired color aspects
    status_t res = getDataSpace(
            params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */);
    if (err == OK) {
        err = res;
    }
    mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
    mInputFormat->setBuffer(
            "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));

    // update input format with codec supported color aspects (basically set unsupported
    // aspects to Unspecified)
    if (err == OK) {
        (void)getInputColorAspectsForVideoEncoder(mInputFormat);
    }

    ALOGV("set default color aspects, updated input format to %s, output format to %s",
            mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());

    return err;
}

// Reads HDR static metadata from the codec on |portIndex| into |format|,
// only when the codec reports it successfully.
status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    status_t err = getHDRStaticInfo(params);
    if (err == OK) {
        // we only set decodec output HDRStaticInfo if codec supports them
        setHDRStaticInfoIntoFormat(params.sInfo, format);
    }
    return err;
}

// Caches the vendor extension index for describeHDRStaticInfo; clears
// mDescribeHDRStaticInfoIndex to 0 on failure so later calls know the codec
// does not support HDR static info.
status_t ACodec::initDescribeHDRStaticInfoIndex() {
    status_t err = mOMXNode->getExtensionIndex(
            "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
    if (err != OK) {
        mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes HDR static metadata to the codec; ERROR_UNSUPPORTED when the
// extension index was never resolved.
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMXNode->setConfig(mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    const HDRStaticInfo *info = &params.sInfo;
    ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, "
            "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
            mComponentName.c_str(),
            info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y,
            info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
            info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
            info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] setting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Reads HDR static metadata from the codec into |params|.
status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMXNode->getConfig(mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] getting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Configures a video encoder from |msg|: input color format and port
// definition, output compression format/bitrate, optional intra refresh,
// codec-specific parameters, and (optionally supported) color aspects, HDR
// static info and temporal layering.
status_t ACodec::setupVideoEncoder(
        const char *mime, const sp<AMessage> &msg,
        sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    int32_t tmp;
    if (!msg->findInt32("color-format", &tmp)) {
        return INVALID_OPERATION;
    }

    OMX_COLOR_FORMATTYPE colorFormat =
        static_cast<OMX_COLOR_FORMATTYPE>(tmp);

    status_t err = setVideoPortFormatType(
            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);

    if (err != OK) {
        ALOGE("[%s] does not support color format %d",
              mComponentName.c_str(), colorFormat);

        return err;
    }

    /* Input port configuration */

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    def.nPortIndex = kPortIndexInput;

    err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    int32_t width, height, bitrate;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)
            || !msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    int32_t stride;
    if (!msg->findInt32("stride", &stride)) {
        stride = width;
    }

    video_def->nStride = stride;

    int32_t sliceHeight;
    if (!msg->findInt32("slice-height", &sliceHeight)) {
        sliceHeight = height;
    }

    video_def->nSliceHeight = sliceHeight;

    // worst-case YUV 4:2:0 frame size for the configured stride/slice height
    def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
        // NOTE(review): mTimePerFrameUs is only updated on this integer
        // frame-rate path, not when "frame-rate" arrives as a float —
        // confirm this asymmetry is intended.
        mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
    }

    // xFramerate is in Q16 fixed point
    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
    // this is redundant as it was already set up in setVideoPortFormatType
    // FIXME for now skip this only for flexible YUV formats
    if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
        video_def->eColorFormat = colorFormat;
    }

    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set input port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    /* Output port configuration */

    OMX_VIDEO_CODINGTYPE compressionFormat;
    err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        ALOGE("[%s] does not support compression format %d",
             mComponentName.c_str(), compressionFormat);

        return err;
    }

    // |def| is reused for the output port definition below.
    def.nPortIndex = kPortIndexOutput;

    err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;
    video_def->xFramerate = 0;
    video_def->nBitrate = bitrate;
    video_def->eCompressionFormat = compressionFormat;
    video_def->eColorFormat = OMX_COLOR_FormatUnused;

    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set output port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    int32_t intraRefreshPeriod = 0;
    if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod >= 0) {
        err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    // codec-standard-specific encoder parameters
    switch (compressionFormat) {
        case OMX_VIDEO_CodingMPEG4:
            err = setupMPEG4EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingH263:
            err = setupH263EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingAVC:
            err = setupAVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingHEVC:
            err = setupHEVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            err = setupVPXEncoderParameters(msg, outputFormat);
            break;

        default:
            break;
    }

    if (err != OK) {
        return err;
    }

    // Set up color aspects on input, but propagate them to the output format, as they will
    // not be read back from encoder.
    err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
    if (err == ERROR_UNSUPPORTED) {
        ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    switch (compressionFormat) {
        case OMX_VIDEO_CodingAVC:
        case OMX_VIDEO_CodingHEVC:
            err = configureTemporalLayers(msg, true /* inConfigure */, outputFormat);
            if (err != OK) {
                err = OK; // ignore failure
            }
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            // TODO: do we need to support android.generic layering? webrtc layering is
            // already set up in setupVPXEncoderParameters.
            break;

        default:
            break;
    }

    if (err == OK) {
        ALOGI("setupVideoEncoder succeeded");
    }

    return err;
}

// Configures cyclic/adaptive intra-macroblock refresh on the output port from
// the "intra-refresh-*-mbs"/"-ref" keys in |msg|; the required keys depend on
// the requested |mode|.
status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nCirMBs = mbs;
    }

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nAirMBs = mbs;

        int32_t ref;
        if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
            return INVALID_OPERATION;
        }
        params.nAirRef = ref;
    }

    status_t err = mOMXNode->setParameter(
            OMX_IndexParamVideoIntraRefresh, &params, sizeof(params));
    return err;
}

// Converts an I-frame interval (seconds) and frame rate into the number of P
// frames between I frames, accounting for B frames in between.
static OMX_U32 setPFramesSpacing(
        float iFramesInterval /* seconds */, int32_t frameRate, uint32_t BFramesSpacing = 0) {
    // BFramesSpacing is the number of B frames between I/P frames
    // PFramesSpacing (the value to be returned) is the number of P frames between I frames
    //
    // keyFrameInterval = ((PFramesSpacing + 1) * BFramesSpacing) + PFramesSpacing + 1
    //                                ^^^                            ^^^             ^^^
    //                     number of B frames                   number of P        I frame
    //
    //                  = (PFramesSpacing + 1) * (BFramesSpacing + 1)
    //
    // E.g.
    //      I P I  : I-interval: 8, nPFrames 1, nBFrames 3
    //       BBB BBB

    if (iFramesInterval < 0) { // just 1 key frame
        return 0xFFFFFFFE; // don't use maxint as key-frame-interval calculation will add 1
    } else if (iFramesInterval == 0) { // just key frames
        return 0;
    }

    // round down as key-frame-interval is an upper limit
    uint32_t keyFrameInterval = uint32_t(frameRate * iFramesInterval);
    OMX_U32 ret = keyFrameInterval / (BFramesSpacing + 1);
    return ret > 0 ? ret - 1 : 0;
}

// Reads "bitrate-mode" from |msg|; defaults to variable bitrate when absent.
static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
    int32_t tmp;
    if (!msg->findInt32("bitrate-mode", &tmp)) {
        return OMX_Video_ControlRateVariable;
    }

    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
}

// MPEG-4 encoder specifics: GOP structure from i-frame-interval/frame-rate,
// optional profile/level, then bitrate and error-correction setup.
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nBFrames = 0;
    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, mpeg4type.nBFrames);
    if (mpeg4type.nPFrames == 0) {
        // key frames only
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        // a profile requires a matching level
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMXNode->setParameter(
            OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// H.263 encoder specifics; mirrors the MPEG-4 setup with H.263 fields.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nBFrames = 0;
    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h263type.nBFrames);
    if (h263type.nPFrames == 0) {
        // key frames only
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        // a profile requires a matching level
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMXNode->setParameter(
            OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// static
// Picks the minimum AVC level that can carry the given dimensions, rate and
// bitrate; the bitrate is first normalized to a main/baseline-profile kbps
// equivalent. (The level table continues beyond this chunk.)
int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
        int width, int height, int rate, int bitrate,
        OMX_VIDEO_AVCPROFILETYPE profile) {
    // convert bitrate to main/baseline profile kbps equivalent
    switch (profile) {
        case OMX_VIDEO_AVCProfileHigh10:
            bitrate = divUp(bitrate, 3000); break;
        case OMX_VIDEO_AVCProfileHigh:
            bitrate = divUp(bitrate, 1250); break;
        default:
            bitrate = divUp(bitrate, 1000); break;
    }

    // convert size and rate to MBs
    width = divUp(width, 16);
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;
    int maxDimension = max(width, height);

    static const int limits[][5] = {
        /* MBps     MB   dim  bitrate        level */
        {
1485,    99,   28,     64, OMX_VIDEO_AVCLevel1  },
        {    1485,    99,   28,    128, OMX_VIDEO_AVCLevel1b },
        {    3000,   396,   56,    192, OMX_VIDEO_AVCLevel11 },
        {    6000,   396,   56,    384, OMX_VIDEO_AVCLevel12 },
        {   11880,   396,   56,    768, OMX_VIDEO_AVCLevel13 },
        {   11880,   396,   56,   2000, OMX_VIDEO_AVCLevel2  },
        {   19800,   792,   79,   4000, OMX_VIDEO_AVCLevel21 },
        {   20250,  1620,  113,   4000, OMX_VIDEO_AVCLevel22 },
        {   40500,  1620,  113,  10000, OMX_VIDEO_AVCLevel3  },
        {  108000,  3600,  169,  14000, OMX_VIDEO_AVCLevel31 },
        {  216000,  5120,  202,  20000, OMX_VIDEO_AVCLevel32 },
        {  245760,  8192,  256,  20000, OMX_VIDEO_AVCLevel4  },
        {  245760,  8192,  256,  50000, OMX_VIDEO_AVCLevel41 },
        {  522240,  8704,  263,  50000, OMX_VIDEO_AVCLevel42 },
        {  589824, 22080,  420, 135000, OMX_VIDEO_AVCLevel5  },
        {  983040, 36864,  543, 240000, OMX_VIDEO_AVCLevel51 },
        { 2073600, 36864,  543, 240000, OMX_VIDEO_AVCLevel52 },
    };

    // Pick the first (i.e. lowest) level whose limits accommodate the
    // MB rate, frame size, max dimension and (normalized) bitrate; 0 if none.
    for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
        const int (&limit)[5] = limits[i];
        if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2]
                && bitrate <= limit[3]) {
            return limit[4];
        }
    }
    return 0;
}

// Translates MediaFormat keys ("bitrate", "i-frame-interval", "frame-rate",
// optional "profile"/"level", "intra-refresh-mode") into the component's
// OMX_VIDEO_PARAM_AVCTYPE configuration, then applies the target bitrate.
status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either a float or an int32 entry.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    status_t err = OK;
    int32_t intraRefreshMode = 0;
    if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) {
        err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode);
        if (err != OK) {
ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4114 err, intraRefreshMode); 4115 return err; 4116 } 4117 } 4118 4119 OMX_VIDEO_PARAM_AVCTYPE h264type; 4120 InitOMXParams(&h264type); 4121 h264type.nPortIndex = kPortIndexOutput; 4122 4123 err = mOMXNode->getParameter( 4124 OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4125 4126 if (err != OK) { 4127 return err; 4128 } 4129 4130 h264type.nAllowedPictureTypes = 4131 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4132 4133 int32_t profile; 4134 if (msg->findInt32("profile", &profile)) { 4135 int32_t level; 4136 if (!msg->findInt32("level", &level)) { 4137 return INVALID_OPERATION; 4138 } 4139 4140 err = verifySupportForProfileAndLevel(profile, level); 4141 4142 if (err != OK) { 4143 return err; 4144 } 4145 4146 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4147 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4148 } else { 4149 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4150#if 0 /* DON'T YET DEFAULT TO HIGHEST PROFILE */ 4151 // Use largest supported profile for AVC recording if profile is not specified. 
4152 for (OMX_VIDEO_AVCPROFILETYPE profile : { 4153 OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCProfileMain }) { 4154 if (verifySupportForProfileAndLevel(profile, 0) == OK) { 4155 h264type.eProfile = profile; 4156 break; 4157 } 4158 } 4159#endif 4160 } 4161 4162 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4163 asString(h264type.eProfile), asString(h264type.eLevel)); 4164 4165 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4166 h264type.nSliceHeaderSpacing = 0; 4167 h264type.bUseHadamard = OMX_TRUE; 4168 h264type.nRefFrames = 1; 4169 h264type.nBFrames = 0; 4170 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4171 if (h264type.nPFrames == 0) { 4172 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4173 } 4174 h264type.nRefIdx10ActiveMinus1 = 0; 4175 h264type.nRefIdx11ActiveMinus1 = 0; 4176 h264type.bEntropyCodingCABAC = OMX_FALSE; 4177 h264type.bWeightedPPrediction = OMX_FALSE; 4178 h264type.bconstIpred = OMX_FALSE; 4179 h264type.bDirect8x8Inference = OMX_FALSE; 4180 h264type.bDirectSpatialTemporal = OMX_FALSE; 4181 h264type.nCabacInitIdc = 0; 4182 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4183 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4184 h264type.nSliceHeaderSpacing = 0; 4185 h264type.bUseHadamard = OMX_TRUE; 4186 h264type.nRefFrames = 2; 4187 h264type.nBFrames = 1; 4188 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4189 h264type.nAllowedPictureTypes = 4190 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4191 h264type.nRefIdx10ActiveMinus1 = 0; 4192 h264type.nRefIdx11ActiveMinus1 = 0; 4193 h264type.bEntropyCodingCABAC = OMX_TRUE; 4194 h264type.bWeightedPPrediction = OMX_TRUE; 4195 h264type.bconstIpred = OMX_TRUE; 4196 h264type.bDirect8x8Inference = OMX_TRUE; 4197 h264type.bDirectSpatialTemporal = OMX_TRUE; 4198 h264type.nCabacInitIdc = 1; 4199 } 4200 4201 if (h264type.nBFrames != 0) { 4202 
h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4203 } 4204 4205 h264type.bEnableUEP = OMX_FALSE; 4206 h264type.bEnableFMO = OMX_FALSE; 4207 h264type.bEnableASO = OMX_FALSE; 4208 h264type.bEnableRS = OMX_FALSE; 4209 h264type.bFrameMBsOnly = OMX_TRUE; 4210 h264type.bMBAFF = OMX_FALSE; 4211 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4212 4213 err = mOMXNode->setParameter( 4214 OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4215 4216 if (err != OK) { 4217 return err; 4218 } 4219 4220 // TRICKY: if we are enabling temporal layering as well, some codecs may not support layering 4221 // when B-frames are enabled. Detect this now so we can disable B frames if temporal layering 4222 // is preferred. 4223 AString tsSchema; 4224 int32_t preferBFrames = (int32_t)false; 4225 if (msg->findString("ts-schema", &tsSchema) 4226 && (!msg->findInt32("android._prefer-b-frames", &preferBFrames) || !preferBFrames)) { 4227 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layering; 4228 InitOMXParams(&layering); 4229 layering.nPortIndex = kPortIndexOutput; 4230 if (mOMXNode->getParameter( 4231 (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 4232 &layering, sizeof(layering)) == OK 4233 && layering.eSupportedPatterns 4234 && layering.nBLayerCountMax == 0) { 4235 h264type.nBFrames = 0; 4236 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4237 h264type.nAllowedPictureTypes &= ~OMX_VIDEO_PictureTypeB; 4238 ALOGI("disabling B-frames"); 4239 err = mOMXNode->setParameter( 4240 OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4241 4242 if (err != OK) { 4243 return err; 4244 } 4245 } 4246 } 4247 4248 return configureBitrate(bitrate, bitrateMode); 4249} 4250 4251status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4252 int32_t bitrate; 4253 float iFrameInterval; 4254 if (!msg->findInt32("bitrate", &bitrate) 4255 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) { 4256 return INVALID_OPERATION; 
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either a float or an int32 entry.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_HEVCTYPE hevcType;
    InitOMXParams(&hevcType);
    hevcType.nPortIndex = kPortIndexOutput;

    status_t err = OK;
    err = mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
    if (err != OK) {
        return err;
    }

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        // A profile request must be accompanied by a level.
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);
        if (err != OK) {
            return err;
        }

        hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile);
        hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level);
    }
    // TODO: finer control?
    // HEVC exposes a single key-frame-interval knob instead of nPFrames/nBFrames.
    hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;

    err = mOMXNode->setParameter(
            (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
    if (err != OK) {
        return err;
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Configures a VP8/VP9 encoder: key-frame interval, temporal layering
// (parsed from "ts-schema") and rate control; advertises the accepted
// layering schema back through |outputFormat|.
status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg, sp<AMessage> &outputFormat) {
    int32_t bitrate;
    float iFrameInterval = 0;
    size_t tsLayers = 0;
    OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern =
        OMX_VIDEO_VPXTemporalLayerPatternNone;
    // Cumulative per-layer bitrate allocation (percent), indexed by
    // [total layer count - 1][layer index].
    static const uint32_t kVp8LayerRateAlloction
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = {
        {100, 100, 100},  // 1 layer
        { 60, 100, 100},  // 2 layers {60%, 40%}
        { 40,  60, 100},  // 3 layers {40%, 20%, 40%}
    };
    if (!msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }
    // i-frame-interval is optional for VPX.
    msg->findAsFloat("i-frame-interval", &iFrameInterval);

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either a float or an int32 entry.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    AString tsSchema;
    OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE tsType =
        OMX_VIDEO_AndroidTemporalLayeringPatternNone;

    if (msg->findString("ts-schema", &tsSchema)) {
        // Accepted schemas: "webrtc.vp8.<N>-layer" or "android.generic.<N>[+<M>]".
        unsigned int numLayers = 0;
        unsigned int numBLayers = 0;
        int tags;
        char dummy;
        if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
                && numLayers > 0) {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            tsType = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
            tsLayers = numLayers;
        } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
                        &numLayers, &dummy, &numBLayers, &dummy))
                &&
(tags == 1 || (tags == 3 && dummy == '+')) 4354 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 4355 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4356 // VPX does not have a concept of B-frames, so just count all layers 4357 tsType = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid; 4358 tsLayers = numLayers + numBLayers; 4359 } else { 4360 ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 4361 } 4362 tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS); 4363 } 4364 4365 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4366 InitOMXParams(&vp8type); 4367 vp8type.nPortIndex = kPortIndexOutput; 4368 status_t err = mOMXNode->getParameter( 4369 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4370 &vp8type, sizeof(vp8type)); 4371 4372 if (err == OK) { 4373 if (iFrameInterval > 0) { 4374 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1; 4375 } 4376 vp8type.eTemporalPattern = pattern; 4377 vp8type.nTemporalLayerCount = tsLayers; 4378 if (tsLayers > 0) { 4379 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4380 vp8type.nTemporalLayerBitrateRatio[i] = 4381 kVp8LayerRateAlloction[tsLayers - 1][i]; 4382 } 4383 } 4384 if (bitrateMode == OMX_Video_ControlRateConstant) { 4385 vp8type.nMinQuantizer = 2; 4386 vp8type.nMaxQuantizer = 63; 4387 } 4388 4389 err = mOMXNode->setParameter( 4390 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4391 &vp8type, sizeof(vp8type)); 4392 if (err != OK) { 4393 ALOGW("Extended VP8 parameters set failed: %d", err); 4394 } else if (tsType == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) { 4395 // advertise even single layer WebRTC layering, as it is defined 4396 outputFormat->setString("ts-schema", AStringPrintf("webrtc.vp8.%u-layer", tsLayers)); 4397 } else if (tsLayers > 0) { 4398 // tsType == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid 4399 outputFormat->setString("ts-schema", AStringPrintf("android.generic.%u", tsLayers)); 4400 } 4401 
} 4402 4403 return configureBitrate(bitrate, bitrateMode); 4404} 4405 4406status_t ACodec::verifySupportForProfileAndLevel( 4407 int32_t profile, int32_t level) { 4408 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4409 InitOMXParams(¶ms); 4410 params.nPortIndex = kPortIndexOutput; 4411 4412 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4413 params.nProfileIndex = index; 4414 status_t err = mOMXNode->getParameter( 4415 OMX_IndexParamVideoProfileLevelQuerySupported, 4416 ¶ms, sizeof(params)); 4417 4418 if (err != OK) { 4419 return err; 4420 } 4421 4422 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4423 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4424 4425 if (profile == supportedProfile && level <= supportedLevel) { 4426 return OK; 4427 } 4428 4429 if (index == kMaxIndicesToCheck) { 4430 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4431 mComponentName.c_str(), index, 4432 params.eProfile, params.eLevel); 4433 } 4434 } 4435 return ERROR_UNSUPPORTED; 4436} 4437 4438status_t ACodec::configureBitrate( 4439 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 4440 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 4441 InitOMXParams(&bitrateType); 4442 bitrateType.nPortIndex = kPortIndexOutput; 4443 4444 status_t err = mOMXNode->getParameter( 4445 OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType)); 4446 4447 if (err != OK) { 4448 return err; 4449 } 4450 4451 bitrateType.eControlRate = bitrateMode; 4452 bitrateType.nTargetBitrate = bitrate; 4453 4454 return mOMXNode->setParameter( 4455 OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType)); 4456} 4457 4458status_t ACodec::setupErrorCorrectionParameters() { 4459 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 4460 InitOMXParams(&errorCorrectionType); 4461 errorCorrectionType.nPortIndex = kPortIndexOutput; 4462 4463 status_t err = mOMXNode->getParameter( 4464 OMX_IndexParamVideoErrorCorrection, 4465 &errorCorrectionType, 
sizeof(errorCorrectionType)); 4466 4467 if (err != OK) { 4468 return OK; // Optional feature. Ignore this failure 4469 } 4470 4471 errorCorrectionType.bEnableHEC = OMX_FALSE; 4472 errorCorrectionType.bEnableResync = OMX_TRUE; 4473 errorCorrectionType.nResynchMarkerSpacing = 256; 4474 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 4475 errorCorrectionType.bEnableRVLC = OMX_FALSE; 4476 4477 return mOMXNode->setParameter( 4478 OMX_IndexParamVideoErrorCorrection, 4479 &errorCorrectionType, sizeof(errorCorrectionType)); 4480} 4481 4482status_t ACodec::setVideoFormatOnPort( 4483 OMX_U32 portIndex, 4484 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 4485 float frameRate) { 4486 OMX_PARAM_PORTDEFINITIONTYPE def; 4487 InitOMXParams(&def); 4488 def.nPortIndex = portIndex; 4489 4490 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 4491 4492 status_t err = mOMXNode->getParameter( 4493 OMX_IndexParamPortDefinition, &def, sizeof(def)); 4494 if (err != OK) { 4495 return err; 4496 } 4497 4498 if (portIndex == kPortIndexInput) { 4499 // XXX Need a (much) better heuristic to compute input buffer sizes. 
4500 const size_t X = 64 * 1024; 4501 if (def.nBufferSize < X) { 4502 def.nBufferSize = X; 4503 } 4504 } 4505 4506 if (def.eDomain != OMX_PortDomainVideo) { 4507 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); 4508 return FAILED_TRANSACTION; 4509 } 4510 4511 video_def->nFrameWidth = width; 4512 video_def->nFrameHeight = height; 4513 4514 if (portIndex == kPortIndexInput) { 4515 video_def->eCompressionFormat = compressionFormat; 4516 video_def->eColorFormat = OMX_COLOR_FormatUnused; 4517 if (frameRate >= 0) { 4518 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 4519 } 4520 } 4521 4522 err = mOMXNode->setParameter( 4523 OMX_IndexParamPortDefinition, &def, sizeof(def)); 4524 4525 return err; 4526} 4527 4528status_t ACodec::initNativeWindow() { 4529 if (mNativeWindow != NULL) { 4530 return mOMXNode->enableNativeBuffers(kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE); 4531 } 4532 4533 mOMXNode->enableNativeBuffers(kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 4534 return OK; 4535} 4536 4537size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 4538 size_t n = 0; 4539 4540 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4541 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 4542 4543 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4544 ++n; 4545 } 4546 } 4547 4548 return n; 4549} 4550 4551size_t ACodec::countBuffersOwnedByNativeWindow() const { 4552 size_t n = 0; 4553 4554 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 4555 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 4556 4557 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4558 ++n; 4559 } 4560 } 4561 4562 return n; 4563} 4564 4565void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 4566 if (mNativeWindow == NULL) { 4567 return; 4568 } 4569 4570 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 4571 && dequeueBufferFromNativeWindow() != NULL) { 4572 // 
these buffers will be submitted as regular buffers; account for this 4573 if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) { 4574 --mMetadataBuffersToSubmit; 4575 } 4576 } 4577} 4578 4579bool ACodec::allYourBuffersAreBelongToUs( 4580 OMX_U32 portIndex) { 4581 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4582 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 4583 4584 if (info->mStatus != BufferInfo::OWNED_BY_US 4585 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4586 ALOGV("[%s] Buffer %u on port %u still has status %d", 4587 mComponentName.c_str(), 4588 info->mBufferID, portIndex, info->mStatus); 4589 return false; 4590 } 4591 } 4592 4593 return true; 4594} 4595 4596bool ACodec::allYourBuffersAreBelongToUs() { 4597 return allYourBuffersAreBelongToUs(kPortIndexInput) 4598 && allYourBuffersAreBelongToUs(kPortIndexOutput); 4599} 4600 4601void ACodec::deferMessage(const sp<AMessage> &msg) { 4602 mDeferredQueue.push_back(msg); 4603} 4604 4605void ACodec::processDeferredMessages() { 4606 List<sp<AMessage> > queue = mDeferredQueue; 4607 mDeferredQueue.clear(); 4608 4609 List<sp<AMessage> >::iterator it = queue.begin(); 4610 while (it != queue.end()) { 4611 onMessageReceived(*it++); 4612 } 4613} 4614 4615status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4616 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4617 OMX_PARAM_PORTDEFINITIONTYPE def; 4618 InitOMXParams(&def); 4619 def.nPortIndex = portIndex; 4620 4621 status_t err = mOMXNode->getParameter(OMX_IndexParamPortDefinition, &def, sizeof(def)); 4622 if (err != OK) { 4623 return err; 4624 } 4625 4626 if (def.eDir != (portIndex == kPortIndexOutput ? 
            OMX_DirOutput : OMX_DirInput)) {
        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
        return BAD_VALUE;
    }

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // Raw video: only valid on a decoder output / encoder input.
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    if (mNativeWindow == NULL) {
                        // No surface: attach a MediaImage2 layout description
                        // so clients can interpret the raw buffers.
                        DescribeColorFormat2Params describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (DescribeColorFormat(mOMXNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        // Query the crop rectangle; default to the full frame
                        // when the component does not report one.
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        if (mOMXNode->getConfig(
                                (portIndex == kPortIndexOutput ?
                                        OMX_IndexConfigCommonOutputCrop :
                                        OMX_IndexConfigCommonInputCrop),
                                &rect, sizeof(rect)) != OK) {
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        // Reject crop rectangles that fall outside the frame.
                        if (rect.nLeft < 0 ||
                            rect.nTop < 0 ||
                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
                                    rect.nLeft, rect.nTop,
                                    rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                        (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
                    } else {
                        (void)getInputColorAspectsForVideoEncoder(notify);
                        if (mConfigFormat->contains("hdr-static-info")) {
                            (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
                        }
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    // Advertise the configured temporal layering schema before
                    // falling through to the generic mime handling.
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        if (vp8type.eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC
                                && vp8type.nTemporalLayerCount > 0
                                && vp8type.nTemporalLayerCount
                                        <= OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) {
                            // advertise as android.generic if we configured for android.generic
                            AString origSchema;
                            if (notify->findString("ts-schema", &origSchema)
                                    && origSchema.startsWith("android.generic")) {
                                notify->setString("ts-schema", AStringPrintf(
                                        "android.generic.%u", vp8type.nTemporalLayerCount));
                            } else {
                                notify->setString("ts-schema", AStringPrintf(
                                        "webrtc.vp8.%u-layer", vp8type.nTemporalLayerCount));
                            }
                        }
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
                        // should be CodingUnused
                        ALOGE("Raw port video compression format is %s(%d)",
                                asString(videoDef->eCompressionFormat),
                                videoDef->eCompressionFormat);
                        return BAD_VALUE;
                    }
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                                videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    uint32_t intraRefreshPeriod = 0;
                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
                            && intraRefreshPeriod > 0) {
                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // Only interleaved (or mono) linear PCM is supported here.
                    if (params.nChannels <= 0
                            || (params.nChannels != 1 && !params.bInterleaved)
                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
                        ALOGE("unsupported PCM port: %u channels%s, %u-bit",
                                params.nChannels,
                                params.bInterleaved ? " interleaved" : "",
                                params.nBitPerSample);
                        return FAILED_TRANSACTION;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);

                    // Map (numerical data type, bits per sample) onto an
                    // AudioEncoding; anything but u8/f32/s16 is rejected.
                    AudioEncoding encoding = kAudioEncodingPcm16bit;
                    if (params.eNumData == OMX_NumericalDataUnsigned
                            && params.nBitPerSample == 8u) {
                        encoding = kAudioEncodingPcm8bit;
                    } else if (params.eNumData == OMX_NumericalDataFloat
                            && params.nBitPerSample == 32u) {
                        encoding = kAudioEncodingPcmFloat;
                    } else if (params.nBitPerSample != 16u
                            || params.eNumData != OMX_NumericalDataSigned) {
                        ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ",
                                asString(params.eNumData), params.eNumData,
                                asString(params.ePCMMode), params.ePCMMode);
                        return FAILED_TRANSACTION;
                    }
                    notify->setInt32("pcm-encoding", encoding);

                    if (mChannelMaskPresent) {
                        notify->setInt32("channel-mask", mChannelMask);
                    }
                    break;
                }

                case OMX_AUDIO_CodingAAC:
                {
                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioAac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAMR:
                {
                    OMX_AUDIO_PARAM_AMRTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioAmr, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // AMR is always mono; the band mode decides NB vs WB.
                    notify->setInt32("channel-count", 1);
                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
                        notify->setInt32("sample-rate", 16000);
                    } else {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
                        notify->setInt32("sample-rate", 8000);
                    }
                    break;
                }

                case OMX_AUDIO_CodingFLAC:
                {
                    OMX_AUDIO_PARAM_FLACTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioFlac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingMP3:
                {
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioMp3, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingVORBIS:
                {
                    OMX_AUDIO_PARAM_VORBISTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioVorbis, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidEAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidOPUS:
                {
                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingG711:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // mu-law / A-law G.711, or raw PCM when linear.
                    const char *mime = NULL;
                    if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
                    } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
                    } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
                        mime = MEDIA_MIMETYPE_AUDIO_RAW;
                    }
                    notify->setString("mime", mime);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);
                    notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit);
                    break;
                }

                case OMX_AUDIO_CodingGSMFR:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);
                    break;
                }

                default:
                    ALOGE("Unsupported audio coding: %s(%d)\n",
                            asString(audioDef->eEncoding), audioDef->eEncoding);
                    return BAD_TYPE;
            }
            break;
        }

        default:
            ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain);
            return BAD_TYPE;
    }

    return OK;
}

void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) {
    // aspects are normally communicated in ColorAspects
    int32_t range, standard, transfer;
    convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);

    // if some aspects are unspecified, use dataspace fields
    // NOTE(review): the comment above says "unspecified", but these tests
    // check "!= 0" (i.e. specified), which overwrites explicit aspects with
    // the dataspace fields and leaves unspecified ones untouched. This looks
    // inverted (== 0) — confirm against the semantics of
    // convertCodecColorAspectsToPlatformAspects before changing.
    if (range != 0) {
        range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
    }
    if (standard != 0) {
        standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
    }
    if (transfer != 0) {
        transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
    }

    mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event
    if (range != 0) {
        mOutputFormat->setInt32("color-range", range);
    }
    if (standard != 0) {
        mOutputFormat->setInt32("color-standard", standard);
    }
    if (transfer != 0) {
        mOutputFormat->setInt32("color-transfer", transfer);
    }
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5088 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5089 dataSpace, 5090 aspects.mRange, asString(aspects.mRange), 5091 aspects.mPrimaries, asString(aspects.mPrimaries), 5092 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5093 aspects.mTransfer, asString(aspects.mTransfer), 5094 range, asString((ColorRange)range), 5095 standard, asString((ColorStandard)standard), 5096 transfer, asString((ColorTransfer)transfer)); 5097} 5098 5099void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5100 // store new output format, at the same time mark that this is no longer the first frame 5101 mOutputFormat = mBaseOutputFormat->dup(); 5102 5103 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5104 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5105 return; 5106 } 5107 5108 if (expectedFormat != NULL) { 5109 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5110 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5111 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5112 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5113 mComponentName.c_str(), 5114 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5115 } 5116 } 5117 5118 if (!mIsVideo && !mIsEncoder) { 5119 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5120 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5121 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5122 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5123 5124 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5125 if (mConverter[kPortIndexOutput] != NULL) { 5126 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5127 } 5128 } 5129 5130 if (mTunneled) { 5131 sendFormatChange(); 5132 } 5133} 5134 5135void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> ¬ify) { 5136 AString mime; 5137 CHECK(mOutputFormat->findString("mime", &mime)); 5138 5139 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) { 5140 // notify renderer of the crop change and dataspace change 5141 // NOTE: native window uses extended right-bottom coordinate 5142 int32_t left, top, right, bottom; 5143 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) { 5144 notify->setRect("crop", left, top, right + 1, bottom + 1); 5145 } 5146 5147 int32_t dataSpace; 5148 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) { 5149 notify->setInt32("dataspace", dataSpace); 5150 } 5151 } 5152} 5153 5154void ACodec::sendFormatChange() { 5155 AString mime; 5156 CHECK(mOutputFormat->findString("mime", &mime)); 5157 5158 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 5159 int32_t channelCount, sampleRate; 5160 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 5161 CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate)); 5162 if (mSampleRate != 0 && sampleRate != 0) { 5163 mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate; 5164 mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate; 5165 mSampleRate = sampleRate; 5166 } 5167 if (mSkipCutBuffer != NULL) { 5168 size_t prevbufsize = mSkipCutBuffer->size(); 5169 if (prevbufsize != 0) { 5170 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 5171 } 5172 } 5173 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 5174 } 5175 5176 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 5177 mLastOutputFormat = mOutputFormat; 5178} 5179 5180void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 5181 sp<AMessage> notify = mNotify->dup(); 5182 notify->setInt32("what", CodecBase::kWhatError); 5183 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 5184 5185 
if (internalError == UNKNOWN_ERROR) { // find better error code 5186 const status_t omxStatus = statusFromOMXError(error); 5187 if (omxStatus != 0) { 5188 internalError = omxStatus; 5189 } else { 5190 ALOGW("Invalid OMX error %#x", error); 5191 } 5192 } 5193 5194 mFatalError = true; 5195 5196 notify->setInt32("err", internalError); 5197 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 5198 notify->post(); 5199} 5200 5201status_t ACodec::requestIDRFrame() { 5202 if (!mIsEncoder) { 5203 return ERROR_UNSUPPORTED; 5204 } 5205 5206 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5207 InitOMXParams(¶ms); 5208 5209 params.nPortIndex = kPortIndexOutput; 5210 params.IntraRefreshVOP = OMX_TRUE; 5211 5212 return mOMXNode->setConfig( 5213 OMX_IndexConfigVideoIntraVOPRefresh, 5214 ¶ms, 5215 sizeof(params)); 5216} 5217 5218//////////////////////////////////////////////////////////////////////////////// 5219 5220ACodec::PortDescription::PortDescription() { 5221} 5222 5223void ACodec::PortDescription::addBuffer( 5224 IOMX::buffer_id id, const sp<MediaCodecBuffer> &buffer) { 5225 mBufferIDs.push_back(id); 5226 mBuffers.push_back(buffer); 5227} 5228 5229size_t ACodec::PortDescription::countBuffers() { 5230 return mBufferIDs.size(); 5231} 5232 5233IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5234 return mBufferIDs.itemAt(index); 5235} 5236 5237sp<MediaCodecBuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5238 return mBuffers.itemAt(index); 5239} 5240 5241//////////////////////////////////////////////////////////////////////////////// 5242 5243ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5244 : AState(parentState), 5245 mCodec(codec) { 5246} 5247 5248ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5249 OMX_U32 /* portIndex */) { 5250 return KEEP_BUFFERS; 5251} 5252 5253bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5254 switch (msg->what()) { 5255 
case kWhatInputBufferFilled: 5256 { 5257 onInputBufferFilled(msg); 5258 break; 5259 } 5260 5261 case kWhatOutputBufferDrained: 5262 { 5263 onOutputBufferDrained(msg); 5264 break; 5265 } 5266 5267 case ACodec::kWhatOMXMessageList: 5268 { 5269 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5270 } 5271 5272 case ACodec::kWhatOMXMessageItem: 5273 { 5274 // no need to check as we already did it for kWhatOMXMessageList 5275 return onOMXMessage(msg); 5276 } 5277 5278 case ACodec::kWhatOMXMessage: 5279 { 5280 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5281 } 5282 5283 case ACodec::kWhatSetSurface: 5284 { 5285 sp<AReplyToken> replyID; 5286 CHECK(msg->senderAwaitsResponse(&replyID)); 5287 5288 sp<RefBase> obj; 5289 CHECK(msg->findObject("surface", &obj)); 5290 5291 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5292 5293 sp<AMessage> response = new AMessage; 5294 response->setInt32("err", err); 5295 response->postReply(replyID); 5296 break; 5297 } 5298 5299 case ACodec::kWhatCreateInputSurface: 5300 case ACodec::kWhatSetInputSurface: 5301 case ACodec::kWhatSignalEndOfInputStream: 5302 { 5303 // This may result in an app illegal state exception. 5304 ALOGE("Message 0x%x was not handled", msg->what()); 5305 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5306 return true; 5307 } 5308 5309 case ACodec::kWhatOMXDied: 5310 { 5311 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 
5312 ALOGE("OMX/mediaserver died, signalling error!"); 5313 mCodec->mGraphicBufferSource.clear(); 5314 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5315 break; 5316 } 5317 5318 case ACodec::kWhatReleaseCodecInstance: 5319 { 5320 ALOGI("[%s] forcing the release of codec", 5321 mCodec->mComponentName.c_str()); 5322 status_t err = mCodec->mOMXNode->freeNode(); 5323 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5324 mCodec->mComponentName.c_str(), err); 5325 sp<AMessage> notify = mCodec->mNotify->dup(); 5326 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5327 notify->post(); 5328 break; 5329 } 5330 5331 default: 5332 return false; 5333 } 5334 5335 return true; 5336} 5337 5338bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5339 // there is a possibility that this is an outstanding message for a 5340 // codec that we have already destroyed 5341 if (mCodec->mOMXNode == NULL) { 5342 ALOGI("ignoring message as already freed component: %s", 5343 msg->debugString().c_str()); 5344 return false; 5345 } 5346 5347 int32_t generation; 5348 CHECK(msg->findInt32("generation", (int32_t*)&generation)); 5349 if (generation != mCodec->mNodeGeneration) { 5350 ALOGW("Unexpected message for component: %s, gen %u, cur %u", 5351 msg->debugString().c_str(), generation, mCodec->mNodeGeneration); 5352 return false; 5353 } 5354 return true; 5355} 5356 5357bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5358 sp<RefBase> obj; 5359 CHECK(msg->findObject("messages", &obj)); 5360 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5361 5362 bool receivedRenderedEvents = false; 5363 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5364 it != msgList->getList().cend(); ++it) { 5365 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 5366 mCodec->handleMessage(*it); 5367 int32_t type; 5368 CHECK((*it)->findInt32("type", &type)); 5369 if (type == omx_message::FRAME_RENDERED) { 5370 
receivedRenderedEvents = true;
        }
    }

    if (receivedRenderedEvents) {
        // NOTE: all buffers are rendered in this case
        mCodec->notifyOfRenderedFrames();
    }
    return true;
}

// Demultiplexes a single OMX callback message ("type" selects the payload
// layout) and forwards it to the matching onOMX* handler.
bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));

    switch (type) {
        case omx_message::EVENT:
        {
            int32_t event, data1, data2;
            CHECK(msg->findInt32("event", &event));
            CHECK(msg->findInt32("data1", &data1));
            CHECK(msg->findInt32("data2", &data2));

            if (event == OMX_EventCmdComplete
                    && data1 == OMX_CommandFlush
                    && data2 == (int32_t)OMX_ALL) {
                // Use of this notification is not consistent across
                // implementations. We'll drop this notification and rely
                // on flush-complete notifications on the individual port
                // indices instead.

                return true;
            }

            return onOMXEvent(
                    static_cast<OMX_EVENTTYPE>(event),
                    static_cast<OMX_U32>(data1),
                    static_cast<OMX_U32>(data2));
        }

        case omx_message::EMPTY_BUFFER_DONE:
        {
            // Input buffer returned by the component; may carry a fence fd.
            IOMX::buffer_id bufferID;
            int32_t fenceFd;

            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXEmptyBufferDone(bufferID, fenceFd);
        }

        case omx_message::FILL_BUFFER_DONE:
        {
            // Output buffer produced by the component, with range/flags/pts.
            IOMX::buffer_id bufferID;
            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));

            int32_t rangeOffset, rangeLength, flags, fenceFd;
            int64_t timeUs;

            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    fenceFd);
        }

        case omx_message::FRAME_RENDERED:
        {
            int64_t mediaTimeUs, systemNano;

            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
            CHECK(msg->findInt64("system_nano", &systemNano));

            return onOMXFrameRendered(
                    mediaTimeUs, systemNano);
        }

        default:
            ALOGE("Unexpected message type: %d", type);
            return false;
    }
}

bool ACodec::BaseState::onOMXFrameRendered(
        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
    // ignore outside of Executing and PortSettingsChanged states
    return true;
}

// Base handling for OMX events: dataspace changes are consumed here, errors
// are translated into signalError(), everything else is left to subclasses.
bool ACodec::BaseState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    if (event == OMX_EventDataSpaceChanged) {
        // data2 carries packed ColorAspects; data1 is the raw dataspace.
        ColorAspects aspects = ColorUtils::unpackToColorAspects(data2);

        mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
        return true;
    }

    if (event != OMX_EventError) {
        ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
             mCodec->mComponentName.c_str(), event, data1, data2);

        // not handled here; a subclass may handle it
        return false;
    }

    ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);

    // verify OMX component sends back an error we expect.
OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
    if (!isOMXError(omxError)) {
        ALOGW("Invalid OMX error %#x", omxError);
        omxError = OMX_ErrorUndefined;
    }
    mCodec->signalError(omxError);

    return true;
}

// The component has consumed an input buffer; reclaim it and, depending on
// the state's port mode, immediately offer it upstream again.
bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
    ALOGV("[%s] onOMXEmptyBufferDone %u",
         mCodec->mComponentName.c_str(), bufferID);

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        // Ownership mismatch — the component returned a buffer it didn't hold.
        ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        if (fenceFd >= 0) {
            // don't leak the fence fd on the error path
            ::close(fenceFd);
        }
        return false;
    }
    info->mStatus = BufferInfo::OWNED_BY_US;

    // input buffers cannot take fences, so wait for any fence now
    (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
    fenceFd = -1;

    // still save fence for completeness
    info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");

    // We're in "store-metadata-in-buffers" mode, the underlying
    // OMX component had access to data that's implicitly refcounted
    // by this "MediaBuffer" object. Now that the OMX component has
    // told us that it's done with the input buffer, we can decrement
    // the mediaBuffer's reference count.
    info->mData->setMediaBufferBase(NULL);

    PortMode mode = getPortMode(kPortIndexInput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
            // hand the buffer back to the client to be refilled
            postFillThisBuffer(info);
            break;

        case FREE_BUFFERS:
        default:
            // NOTE(review): message says "output" but this is the input-port
            // EBD path — wording looks like a copy/paste slip; confirm intent.
            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
            return false;
    }

    return true;
}

// Offers an input buffer we own to the client (MediaCodec) for filling;
// ownership moves to OWNED_BY_UPSTREAM until kWhatInputBufferFilled returns.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        // no more input is accepted after EOS
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
    notify->setInt32("buffer-id", info->mBufferID);

    info->mData->setFormat(mCodec->mInputFormat);
    notify->setObject("buffer", info->mData);
    info->mData.clear();

    // client answers via this reply, which routes to onInputBufferFilled
    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
    reply->setInt32("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

// Client returned an input buffer (filled, empty-on-flush, or with an error).
// Depending on the port mode, submit it to the component via emptyBuffer().
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<MediaCodecBuffer> buffer;
    int32_t err = OK;
    bool eos = false;
    PortMode mode = getPortMode(kPortIndexInput);

    sp<RefBase> obj;
    if (!msg->findObject("buffer", &obj)) {
        /* these are unfilled buffers returned by client */
        CHECK(msg->findInt32("err", &err));

        if (err == OK) {
            /* buffers with no errors are returned on MediaCodec.flush */
            mode = KEEP_BUFFERS;
        } else {
            ALOGV("[%s] saw error %d instead of an input buffer",
                 mCodec->mComponentName.c_str(), err);
            eos = true;
        }
    } else {
        buffer = static_cast<MediaCodecBuffer *>(obj.get());
    }
int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        // client flagged end-of-stream on this buffer
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;
    info->mData = buffer;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // flushing/stopped: just remember EOS, don't submit anything
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                MetadataBufferType metaType = mCodec->mInputMetadataType;
                int32_t isCSD = 0;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    if (mCodec->mIsLegacyVP9Decoder) {
                        // legacy VP9 components choke on CSD; just recycle it
                        ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                        postFillThisBuffer(info);
                        break;
                    }
                    // CSD is sent as a plain (non-metadata) codec-config buffer
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                    metaType = kMetadataBufferTypeInvalid;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                size_t size = buffer->size();
                if (buffer->base() != info->mCodecData->base()) {
                    // client buffer and codec buffer are distinct backings;
                    // copy/convert into the codec-visible buffer
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                            mCodec->mComponentName.c_str(),
                            bufferID,
                            buffer->base(), info->mCodecData->base());

                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL || isCSD) {
                        // CSD must never go through the PCM converter
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                    size = info->mCodecData->size();
                } else {
                    info->mCodecData->setRange(0, size);
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                         mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                         mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");

                // submit to the component; the payload wrapper depends on the
                // input metadata mode (plain data, native handle, or ANW buffer)
                status_t err2 = OK;
                switch (metaType) {
                case kMetadataBufferTypeInvalid:
                    {
                        err2 = mCodec->mOMXNode->emptyBuffer(
                            bufferID, info->mCodecData, flags, timeUs, info->mFenceFd);
                    }
                    break;
#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                case kMetadataBufferTypeNativeHandleSource:
                    if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
                        VideoNativeHandleMetadata *vnhmd =
                            (VideoNativeHandleMetadata*)info->mCodecData->base();
                        sp<NativeHandle> handle = NativeHandle::create(
                                vnhmd->pHandle, false /* ownsHandle */);
                        err2 = mCodec->mOMXNode->emptyBuffer(
                            bufferID, handle, flags, timeUs, info->mFenceFd);
                    }
                    break;
                case kMetadataBufferTypeANWBuffer:
                    if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
                        VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
                        sp<GraphicBuffer> graphicBuffer = new GraphicBuffer(
                                vnmd->pBuffer, false /* keepOwnership */);
                        err2 = mCodec->mOMXNode->emptyBuffer(
                            bufferID, graphicBuffer, flags, timeUs, info->mFenceFd);
                    }
                    break;
#endif
                default:
                    ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode",
                            asString(metaType), info->mCodecData->size(),
                            sizeof(buffer_handle_t) * 8);
                    err2 = ERROR_UNSUPPORTED;
                    break;
                }

                // fence ownership passed to emptyBuffer above
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                // Hold the reference while component is using the buffer.
                info->mData = buffer;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                         mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // no payload: signal EOS to the component with an empty buffer
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                         mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                         mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                     mCodec->mComponentName.c_str(), bufferID);

                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMXNode->emptyBuffer(
                        bufferID, OMXBuffer::sPreset, OMX_BUFFERFLAG_EOS, 0, info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// Scans the input port for a buffer we own and offers it upstream; no-op
// once input EOS has been seen.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
5818 return; 5819 } 5820#endif 5821 5822 if (info->mStatus == BufferInfo::OWNED_BY_US) { 5823 eligible = info; 5824 } 5825 } 5826 5827 if (eligible == NULL) { 5828 return; 5829 } 5830 5831 postFillThisBuffer(eligible); 5832} 5833 5834bool ACodec::BaseState::onOMXFillBufferDone( 5835 IOMX::buffer_id bufferID, 5836 size_t rangeOffset, size_t rangeLength, 5837 OMX_U32 flags, 5838 int64_t timeUs, 5839 int fenceFd) { 5840 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x", 5841 mCodec->mComponentName.c_str(), bufferID, timeUs, flags); 5842 5843 ssize_t index; 5844 status_t err= OK; 5845 5846#if TRACK_BUFFER_TIMING 5847 index = mCodec->mBufferStats.indexOfKey(timeUs); 5848 if (index >= 0) { 5849 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index); 5850 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs(); 5851 5852 ALOGI("frame PTS %lld: %lld", 5853 timeUs, 5854 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs); 5855 5856 mCodec->mBufferStats.removeItemsAt(index); 5857 stats = NULL; 5858 } 5859#endif 5860 5861 BufferInfo *info = 5862 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5863 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5864 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5865 ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5866 mCodec->dumpBuffers(kPortIndexOutput); 5867 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5868 if (fenceFd >= 0) { 5869 ::close(fenceFd); 5870 } 5871 return true; 5872 } 5873 5874 info->mDequeuedAt = ++mCodec->mDequeueCounter; 5875 info->mStatus = BufferInfo::OWNED_BY_US; 5876 5877 if (info->mRenderInfo != NULL) { 5878 // The fence for an emptied buffer must have signaled, but there still could be queued 5879 // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these, 5880 // as we will soon requeue this buffer to the surface. 
While in theory we could still keep 5881 // track of buffers that are requeued to the surface, it is better to add support to the 5882 // buffer-queue to notify us of released buffers and their fences (in the future). 5883 mCodec->notifyOfRenderedFrames(true /* dropIncomplete */); 5884 } 5885 5886 // byte buffers cannot take fences, so wait for any fence now 5887 if (mCodec->mNativeWindow == NULL) { 5888 (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone"); 5889 fenceFd = -1; 5890 } 5891 info->setReadFence(fenceFd, "onOMXFillBufferDone"); 5892 5893 PortMode mode = getPortMode(kPortIndexOutput); 5894 5895 switch (mode) { 5896 case KEEP_BUFFERS: 5897 break; 5898 5899 case RESUBMIT_BUFFERS: 5900 { 5901 if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS) 5902 || mCodec->mPortEOS[kPortIndexOutput])) { 5903 ALOGV("[%s] calling fillBuffer %u", 5904 mCodec->mComponentName.c_str(), info->mBufferID); 5905 5906 err = mCodec->fillBuffer(info); 5907 if (err != OK) { 5908 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5909 return true; 5910 } 5911 break; 5912 } 5913 5914 sp<AMessage> reply = 5915 new AMessage(kWhatOutputBufferDrained, mCodec); 5916 sp<MediaCodecBuffer> buffer = info->mData; 5917 5918 if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) { 5919 // pretend that output format has changed on the first frame (we used to do this) 5920 if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) { 5921 mCodec->onOutputFormatChanged(mCodec->mOutputFormat); 5922 } 5923 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 5924 mCodec->sendFormatChange(); 5925 } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) { 5926 // If potentially rendering onto a surface, always save key format data (crop & 5927 // data space) so that we can set it if and once the buffer is rendered. 
5928 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 5929 } 5930 buffer->setFormat(mCodec->mOutputFormat); 5931 5932 if (mCodec->usingMetadataOnEncoderOutput()) { 5933 native_handle_t *handle = NULL; 5934 VideoNativeHandleMetadata &nativeMeta = 5935 *(VideoNativeHandleMetadata *)buffer->data(); 5936 if (buffer->size() >= sizeof(nativeMeta) 5937 && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) { 5938#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 5939 // handle is only valid on 32-bit/mediaserver process 5940 handle = NULL; 5941#else 5942 handle = (native_handle_t *)nativeMeta.pHandle; 5943#endif 5944 } 5945 buffer->meta()->setPointer("handle", handle); 5946 buffer->meta()->setInt32("rangeOffset", rangeOffset); 5947 buffer->meta()->setInt32("rangeLength", rangeLength); 5948 } else if (buffer->base() == info->mCodecData->base()) { 5949 buffer->setRange(rangeOffset, rangeLength); 5950 } else { 5951 info->mCodecData->setRange(rangeOffset, rangeLength); 5952 // in this case we know that mConverter is not null 5953 status_t err = mCodec->mConverter[kPortIndexOutput]->convert( 5954 info->mCodecData, buffer); 5955 if (err != OK) { 5956 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5957 return true; 5958 } 5959 } 5960#if 0 5961 if (mCodec->mNativeWindow == NULL) { 5962 if (IsIDR(info->mData)) { 5963 ALOGI("IDR frame"); 5964 } 5965 } 5966#endif 5967 5968 if (mCodec->mSkipCutBuffer != NULL) { 5969 mCodec->mSkipCutBuffer->submit(buffer); 5970 } 5971 buffer->meta()->setInt64("timeUs", timeUs); 5972 5973 sp<AMessage> notify = mCodec->mNotify->dup(); 5974 notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); 5975 notify->setInt32("buffer-id", info->mBufferID); 5976 notify->setObject("buffer", buffer); 5977 info->mData.clear(); 5978 notify->setInt32("flags", flags); 5979 5980 reply->setInt32("buffer-id", info->mBufferID); 5981 5982 notify->setMessage("reply", reply); 5983 5984 notify->post(); 5985 5986 info->mStatus = 
BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                // Forward EOS to the client, carrying any error recorded when
                // input-side EOS was queued.
                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        case FREE_BUFFERS:
            err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return true;
            }
            break;

        default:
            ALOGE("Invalid port mode: %d", mode);
            return false;
    }

    return true;
}

// Handles the "reply" message posted with kWhatDrainThisBuffer: the client is
// done with an output buffer. Validates ownership, optionally queues the
// buffer to the native window for rendering (or drops it), then either
// resubmits it to the component, keeps it, or frees it depending on the
// current port mode.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<RefBase> obj;
    sp<MediaCodecBuffer> buffer = nullptr;
    if (msg->findObject("buffer", &obj)) {
        buffer = static_cast<MediaCodecBuffer *>(obj.get());
    }
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    // getSafeStatus tolerates info == NULL; anything other than
    // OWNED_BY_DOWNSTREAM means the buffer came back out of order — fatal.
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }
    info->mData = buffer;

    // Apply a crop change requested by the client, but only touch the native
    // window when the rectangle actually differs from the last one we set.
    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
            && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
        mCodec->mLastNativeWindowCrop = crop;
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    // Same caching pattern for the dataspace.
    int32_t dataSpace;
    if (msg->findInt32("dataspace", &dataSpace)
            && dataSpace != mCodec->mLastNativeWindowDataSpace) {
        status_t err = native_window_set_buffers_data_space(
                mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
        mCodec->mLastNativeWindowDataSpace = dataSpace;
        ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
    }

    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0
            && buffer != NULL && buffer->size() != 0) {
        ATRACE_NAME("render");
        // The client wants this buffer to be rendered.

        // save buffers sent to the surface so we can get render time when they return
        int64_t mediaTimeUs = -1;
        buffer->meta()->findInt64("timeUs", &mediaTimeUs);
        if (mediaTimeUs >= 0) {
            mCodec->mRenderTracker.onFrameQueued(
                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
        }

        int64_t timestampNs = 0;
        if (!msg->findInt64("timestampNs", &timestampNs)) {
            // use media timestamp if client did not request a specific render timestamp
            if (buffer->meta()->findInt64("timeUs", &timestampNs)) {
                ALOGV("using buffer PTS of %lld", (long long)timestampNs);
                timestampNs *= 1000;
            }
        }

        status_t err;
        err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
        ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);

        // queueBuffer consumes the fence fd; clear ours unconditionally so it
        // is never closed or reused twice.
        info->checkReadFence("onOutputBufferDrained before queueBuffer");
        err = mCodec->mNativeWindow->queueBuffer(
                    mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
        info->mFenceFd = -1;
        if (err == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            // Rendering failed: signal the error but reclaim the buffer so it
            // can still be recycled below.
            ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            info->mStatus = BufferInfo::OWNED_BY_US;
            // keeping read fence as write fence to avoid clobbering
            info->mIsReadFence = false;
        }
    } else {
        // Not rendered (dropped frame, no surface, or empty buffer).
        if (mCodec->mNativeWindow != NULL &&
            (buffer == NULL || buffer->size() != 0)) {
            // move read fence into write fence to avoid clobbering
            info->mIsReadFence = false;
            ATRACE_NAME("frame-drop");
        }
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            // Hand the buffer (or a freshly dequeued spare) back to the
            // component, unless the output port has already seen EOS.
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                         mCodec->mComponentName.c_str(), info->mBufferID);
                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                    status_t err = mCodec->fillBuffer(info);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                }
            }
            break;
        }

        case FREE_BUFFERS:
        {
            status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            }
            break;
        }

        default:
            ALOGE("Invalid port mode: %d", mode);
            return;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Uninitialized: sever the link to the (now gone or about-to-be-gone)
// OMX node and reset all per-component state so a fresh allocation starts
// from a clean slate.
void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    if (mDeathNotifier != NULL) {
        if (mCodec->mOMXNode != NULL) {
            sp<IBinder> binder = IInterface::asBinder(mCodec->mOMXNode);
            binder->unlinkToDeath(mDeathNotifier);
        }
        mDeathNotifier.clear();
    }

    mCodec->mUsingNativeWindow = false;
    mCodec->mNativeWindow.clear();
    mCodec->mNativeWindowUsageBits = 0;
    mCodec->mOMX.clear();
    mCodec->mOMXNode.clear();
    mCodec->mFlags = 0;
    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mConverter[0].clear();
    mCodec->mConverter[1].clear();
    mCodec->mComponentName.clear();
}

// Message dispatch while no component is allocated. Shutdown and flush are
// acknowledged immediately since there is nothing to tear down or flush.
bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }

        case
ACodec::kWhatAllocateComponent:
        {
            onAllocateComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));
            // There is no component to keep; warn and complete the shutdown
            // right away.
            ALOGW_IF(keepComponentAllocated,
                     "cannot keep component allocated on shutdown in Uninitialized state");

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // Nothing allocated, so flushing is trivially complete.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatReleaseCodecInstance:
        {
            // nothing to do, as we have already signaled shutdown
            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// One-shot setup path: allocate, configure and start the component in a
// single step, aborting on the first failure.
void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    if (onAllocateComponent(msg)
            && mCodec->mLoadedState->onConfigureComponent(msg)) {
        mCodec->mLoadedState->onStart();
    }
}

// Connects to the OMX service, builds a candidate codec list (either the
// explicitly requested component name, or all matches for the given mime
// type/encoder flag), and instantiates the first component that allocates
// successfully. On success transitions to the Loaded state and posts
// kWhatComponentAllocated; on failure signals an error and returns false.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mOMXNode == NULL);

    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    sp<IOMX> omx = client.interface();

    // Message to post if the OMX service dies; attached to the death
    // notifier below.
    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

    Vector<AString> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        // Caller asked for a specific component; only accept it if the codec
        // list knows about it.
        sp<IMediaCodecList> list = MediaCodecList::getInstance();
        if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
            matchingCodecs.add(componentName);
        }
    } else {
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        MediaCodecList::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                0,       // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    sp<IOMXNode> omxNode;

    status_t err = NAME_NOT_FOUND;
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs[matchIndex];
        quirks = MediaCodecList::getQuirksFor(componentName.c_str());

        // Temporarily raise this thread's priority while allocating the node.
        // NOTE(review): presumably so component construction isn't starved by
        // background scheduling — confirm against OMX service behavior.
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        err = omx->allocateNode(componentName.c_str(), observer, &omxNode);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        omxNode = NULL;
    }

    if (omxNode == NULL) {
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                    encoder ? "en" : "de", mime.c_str(), err);
        } else {
            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
        }

        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
        return false;
    }

    mDeathNotifier = new DeathNotifier(notify);
    if (IInterface::asBinder(omxNode)->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    // Route OMX callbacks through the observer, tagged with a generation
    // counter so stale messages from a previous node are ignored.
    notify = new AMessage(kWhatOMXMessageList, mCodec);
    notify->setInt32("generation", ++mCodec->mNodeGeneration);
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mRenderTracker.setComponentName(componentName);
    mCodec->mFlags = 0;

    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    omxNode->setQuirks(quirks);
    mCodec->mOMX = omx;
    mCodec->mOMXNode = omxNode;

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedState::LoadedState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Loaded: clear all per-session state (EOS flags, formats, encoder
// surface source), and if a shutdown was requested while we were getting
// here, finish it now.
void ACodec::LoadedState::stateEntered() {
    ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetadataBuffersToSubmit = 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();
    mCodec->mGraphicBufferSource.clear();

    if (mCodec->mShutdownInProgress) {
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;
    mCodec->processDeferredMessages();
}

// Shuts down from Loaded: frees the OMX node (unless the caller wants the
// component kept alive for reuse) and acknowledges an explicit shutdown
// request with kWhatShutdownCompleted.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        (void)mCodec->mOMXNode->freeNode();

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

// Message dispatch in the Loaded state: configuration, input-surface setup
// and start requests are valid here; flush is a no-op acknowledged
// immediately.
bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatSetInputSurface:
        {
            onSetInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // Nothing is running yet, so report flush completion directly.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Configures the allocated component from the client's format message
// (requires a "mime" entry). Posts kWhatComponentConfigured with the
// resulting input/output formats on success; signals an error and returns
// false otherwise.
bool ACodec::LoadedState::onConfigureComponent(
        const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mOMXNode != NULL);

    status_t err = OK;
    AString mime;
    if (!msg->findString("mime", &mime)) {
        err = BAD_VALUE;
    } else {
        err = mCodec->configureCodec(mime.c_str(), msg);
    }
    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d",
              mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentConfigured);
        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
        notify->post();
    }

    return true;
}

// Applies all pending encoder-surface options (dataspace, frame-repeat delay,
// max PTS gap, max fps, time lapse, initial suspend, color aspects) to the
// GraphicBufferSource feeding this encoder. Returns the first failure, or OK.
status_t ACodec::LoadedState::setupInputSurface() {
    if (mCodec->mGraphicBufferSource == NULL) {
        return BAD_VALUE;
    }

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    if (err != OK) {
        ALOGE("Failed to get default data space");
        return err;
    }

    err = statusFromBinderStatus(
            mCodec->mGraphicBufferSource->configure(mCodec->mOMXNode, dataSpace));
    if (err != OK) {
        ALOGE("[%s] Unable to configure for node (err %d)",
              mCodec->mComponentName.c_str(), err);
        return err;
    }

    if (mCodec->mRepeatFrameDelayUs > 0ll) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setRepeatPreviousFrameDelayUs(
                        mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mMaxPtsGapUs > 0ll) {
        // The max-pts-gap option goes to the component itself, not the
        // buffer source.
        OMX_PARAM_U32TYPE maxPtsGapParams;
        InitOMXParams(&maxPtsGapParams);
        maxPtsGapParams.nPortIndex = kPortIndexInput;
        maxPtsGapParams.nU32 = (uint32_t) mCodec->mMaxPtsGapUs;

        err = mCodec->mOMXNode->setParameter(
                (OMX_INDEXTYPE)OMX_IndexParamMaxFrameDurationForBitrateControl,
                &maxPtsGapParams, sizeof(maxPtsGapParams));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setMaxFps(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setTimeLapseConfig(
                        mCodec->mTimePerFrameUs, mCodec->mTimePerCaptureUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setSuspend(true));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    // Expose whether the consumer reads buffers with the CPU; best effort —
    // failure to query is silently ignored.
    uint32_t usageBits;
    if (mCodec->mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    sp<ABuffer> colorAspectsBuffer;
    if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        if (colorAspectsBuffer->size() != sizeof(ColorAspects)) {
            return INVALID_OPERATION;
        }

        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setColorAspects(ColorUtils::packToU32(
                        *(ColorAspects *)colorAspectsBuffer->base())));

        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}

// Creates a new encoder input surface via the OMX service, configures it,
// and returns the producer side to the client in kWhatInputSurfaceCreated
// (or an "err" entry on failure).
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    sp<IGraphicBufferProducer> bufferProducer;
    status_t err = mCodec->mOMX->createInputSurface(
            &bufferProducer, &mCodec->mGraphicBufferSource);

    if (err == OK) {
        err = setupInputSurface();
    }

    if (err == OK) {
        // Input now arrives as ANativeWindowBuffer metadata from the surface.
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;

        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);

        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Adopts a client-supplied persistent input surface (instead of creating a
// new one) and configures it; replies with kWhatInputSurfaceAccepted.
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());
    mCodec->mGraphicBufferSource = surface->getBufferSource();

    status_t err = setupInputSurface();

    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;

        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Kicks off the Loaded -> Idle transition by sending OMX_CommandStateSet;
// buffers are allocated once LoadedToIdleState is entered.
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMXNode->sendCommand(OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Loaded->Idle: allocate buffers on both ports. On failure, unwind
// by commanding the component back to Loaded, freeing whatever buffers we
// still own, and returning to the Loaded state.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
              "(error 0x%08x)",
              err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->mOMXNode->sendCommand(
                OMX_CommandStateSet, OMX_StateLoaded);
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates input-port buffers first, then output-port buffers; returns the
// first failure.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

// While the Loaded->Idle transition is in flight, parameter changes and
// shutdown are deferred until a stable state is reached; resume and flush
// are trivially satisfied.
bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Waits for the component to confirm it reached Idle, then immediately asks
// for Executing and moves to the Idle->Executing transition state.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMXNode->sendCommand(
                        OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

// Same deferral policy as LoadedToIdleState while the Idle->Executing
// transition is in flight.
bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Waits for the component to confirm it reached Executing, then resumes
// buffer flow and enters the steady-state Executing state.
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

// In steady state, buffers on both ports are recycled back to the component.
ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    return RESUBMIT_BUFFERS;
}

void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info =
&mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6897 6898 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 6899 if (mCodec->submitOutputMetadataBuffer() != OK) 6900 break; 6901 } 6902 } 6903 6904 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 6905 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 6906} 6907 6908void ACodec::ExecutingState::submitRegularOutputBuffers() { 6909 bool failed = false; 6910 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 6911 BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i); 6912 6913 if (mCodec->mNativeWindow != NULL) { 6914 if (info->mStatus != BufferInfo::OWNED_BY_US 6915 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6916 ALOGE("buffers should be owned by us or the surface"); 6917 failed = true; 6918 break; 6919 } 6920 6921 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6922 continue; 6923 } 6924 } else { 6925 if (info->mStatus != BufferInfo::OWNED_BY_US) { 6926 ALOGE("buffers should be owned by us"); 6927 failed = true; 6928 break; 6929 } 6930 } 6931 6932 ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID); 6933 6934 info->checkWriteFence("submitRegularOutputBuffers"); 6935 status_t err = mCodec->fillBuffer(info); 6936 if (err != OK) { 6937 failed = true; 6938 break; 6939 } 6940 } 6941 6942 if (failed) { 6943 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6944 } 6945} 6946 6947void ACodec::ExecutingState::submitOutputBuffers() { 6948 submitRegularOutputBuffers(); 6949 if (mCodec->storingMetadataInDecodedBuffers()) { 6950 submitOutputMetaBuffers(); 6951 } 6952} 6953 6954void ACodec::ExecutingState::resume() { 6955 if (mActive) { 6956 ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str()); 6957 return; 6958 } 6959 6960 submitOutputBuffers(); 6961 6962 // Post all available input buffers 6963 if (mCodec->mBuffers[kPortIndexInput].size() == 0u) { 6964 ALOGW("[%s] 
we don't have any input buffers to resume", mCodec->mComponentName.c_str()); 6965 } 6966 6967 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) { 6968 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6969 if (info->mStatus == BufferInfo::OWNED_BY_US) { 6970 postFillThisBuffer(info); 6971 } 6972 } 6973 6974 mActive = true; 6975} 6976 6977void ACodec::ExecutingState::stateEntered() { 6978 ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); 6979 6980 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 6981 mCodec->processDeferredMessages(); 6982} 6983 6984bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6985 bool handled = false; 6986 6987 switch (msg->what()) { 6988 case kWhatShutdown: 6989 { 6990 int32_t keepComponentAllocated; 6991 CHECK(msg->findInt32( 6992 "keepComponentAllocated", &keepComponentAllocated)); 6993 6994 mCodec->mShutdownInProgress = true; 6995 mCodec->mExplicitShutdown = true; 6996 mCodec->mKeepComponentAllocated = keepComponentAllocated; 6997 6998 mActive = false; 6999 7000 status_t err = mCodec->mOMXNode->sendCommand( 7001 OMX_CommandStateSet, OMX_StateIdle); 7002 if (err != OK) { 7003 if (keepComponentAllocated) { 7004 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7005 } 7006 // TODO: do some recovery here. 
7007 } else { 7008 mCodec->changeState(mCodec->mExecutingToIdleState); 7009 } 7010 7011 handled = true; 7012 break; 7013 } 7014 7015 case kWhatFlush: 7016 { 7017 ALOGV("[%s] ExecutingState flushing now " 7018 "(codec owns %zu/%zu input, %zu/%zu output).", 7019 mCodec->mComponentName.c_str(), 7020 mCodec->countBuffersOwnedByComponent(kPortIndexInput), 7021 mCodec->mBuffers[kPortIndexInput].size(), 7022 mCodec->countBuffersOwnedByComponent(kPortIndexOutput), 7023 mCodec->mBuffers[kPortIndexOutput].size()); 7024 7025 mActive = false; 7026 7027 status_t err = mCodec->mOMXNode->sendCommand(OMX_CommandFlush, OMX_ALL); 7028 if (err != OK) { 7029 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7030 } else { 7031 mCodec->changeState(mCodec->mFlushingState); 7032 } 7033 7034 handled = true; 7035 break; 7036 } 7037 7038 case kWhatResume: 7039 { 7040 resume(); 7041 7042 handled = true; 7043 break; 7044 } 7045 7046 case kWhatRequestIDRFrame: 7047 { 7048 status_t err = mCodec->requestIDRFrame(); 7049 if (err != OK) { 7050 ALOGW("Requesting an IDR frame failed."); 7051 } 7052 7053 handled = true; 7054 break; 7055 } 7056 7057 case kWhatSetParameters: 7058 { 7059 sp<AMessage> params; 7060 CHECK(msg->findMessage("params", ¶ms)); 7061 7062 status_t err = mCodec->setParameters(params); 7063 7064 sp<AMessage> reply; 7065 if (msg->findMessage("reply", &reply)) { 7066 reply->setInt32("err", err); 7067 reply->post(); 7068 } 7069 7070 handled = true; 7071 break; 7072 } 7073 7074 case ACodec::kWhatSignalEndOfInputStream: 7075 { 7076 mCodec->onSignalEndOfInputStream(); 7077 handled = true; 7078 break; 7079 } 7080 7081 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 7082 case kWhatSubmitOutputMetadataBufferIfEOS: 7083 { 7084 if (mCodec->mPortEOS[kPortIndexInput] && 7085 !mCodec->mPortEOS[kPortIndexOutput]) { 7086 status_t err = mCodec->submitOutputMetadataBuffer(); 7087 if (err == OK) { 7088 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 7089 } 7090 } 7091 
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime parameter keys while the codec is executing.
// Recognized keys (all optional; unrecognized keys are silently ignored):
//   "video-bitrate"        (int32) -> OMX_IndexConfigVideoBitrate on the output port
//   "time-offset-us"       (int64) -> input buffer time offset (surface input only)
//   "skip-frames-before"   (int64) -> start time (surface input only)
//   "drop-input-frames"    (int32) -> suspend (non-zero) / resume surface input
//   "request-sync"         (int32) -> request an IDR (sync) frame
//   "operating-rate"       (float) -> component operating rate (must be > 0)
//   "intra-refresh-period" (int32) -> intra refresh; failure is tolerated
// Returns OK, or the first error from a key whose failure is fatal.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMXNode->setConfig(
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t timeOffsetUs;
    if (params->findInt64("time-offset-us", &timeOffsetUs)) {
        // Only meaningful when input comes from a surface (GraphicBufferSource).
        if (mGraphicBufferSource == NULL) {
            ALOGE("[%s] Invalid to set input buffer time offset without surface",
                    mComponentName.c_str());
            return INVALID_OPERATION;
        }

        status_t err = statusFromBinderStatus(
                mGraphicBufferSource->setTimeOffsetUs(timeOffsetUs));

        if (err != OK) {
            ALOGE("[%s] Unable to set input buffer time offset (err %d)",
                mComponentName.c_str(),
                err);
            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        if (mGraphicBufferSource == NULL) {
            ALOGE("[%s] Invalid to set start time without surface",
                    mComponentName.c_str());
            return INVALID_OPERATION;
        }

        status_t err = statusFromBinderStatus(
                mGraphicBufferSource->setStartTimeUs(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        if (mGraphicBufferSource == NULL) {
            ALOGE("[%s] Invalid to set suspend without surface",
                    mComponentName.c_str());
            return INVALID_OPERATION;
        }

        // Non-zero suspends (drops incoming frames); zero resumes.
        status_t err = statusFromBinderStatus(
                mGraphicBufferSource->setSuspend(dropInputFrames != 0));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        // The value is ignored; the key's presence requests an IDR frame.
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err = setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            // Intra refresh support is optional; log and carry on.
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    status_t err = configureTemporalLayers(params, false /* inConfigure */, mOutputFormat);
    if (err != OK) {
        err = OK; // ignore failure
    }

    return err;
}

// Forwards an end-of-input-stream request to the surface input source (if any)
// and notifies the client of the outcome via kWhatSignaledInputEOS.
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = INVALID_OPERATION;
    if (mGraphicBufferSource != NULL) {
        err = statusFromBinderStatus(mGraphicBufferSource->signalEndOfInputStream());
    }
    if (err != OK) {
        // "err" is only attached on failure; its absence indicates success.
        notify->setInt32("err", err);
    }
    notify->post();
}

// Relays a frame-rendered callback from OMX to the codec's notification path.
bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Handles OMX events while executing; the interesting one is
// OMX_EventPortSettingsChanged, which may require a full output port
// reconfiguration (disable -> free -> reallocate -> enable).
bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                // Buffer geometry changed: disable the output port, drop the
                // buffers we still hold and move to the reconfiguration state.
                mCodec->mMetadataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMXNode->sendCommand(
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                // Crop / intra-refresh updates were already folded into the
                // output format above; anything else is merely logged.
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                     mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            // EOS flag notification; nothing to do here.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}
////////////////////////////////////////////////////////////////////////////////

// State entered while the output port is being disabled, reallocated and
// re-enabled after an OMX_EventPortSettingsChanged on the output port.
ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// Output buffers are being torn down (FREE_BUFFERS); input keeps flowing
// (RESUBMIT_BUFFERS) so the component can keep consuming data.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

// Defers flush/shutdown/resume/setParameters until the reconfiguration has
// completed and we are back in ExecutingState.
bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

// Frame-rendered callbacks still arrive during reconfiguration; relay them.
bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Drives the two-phase handshake: once the port-disable completes we
// re-enable the port and allocate fresh buffers; once the port-enable
// completes we return to ExecutingState.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    // All output buffers should have been freed before the
                    // disable completed; anything left indicates a bug.
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMXNode->sendCommand(
                            OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from excecuting->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

// Transitional state: OMX_StateIdle has been requested; we wait for the
// command to complete and for all buffers to return to us.
ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

bool
ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

// Waits for the StateSet->Idle command completion; any other command
// completion here is a protocol violation and is reported as an error.
bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component is idle AND every buffer is back in our hands, request
// the Loaded state and free all port buffers.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMXNode->sendCommand(
                OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            // Free both ports; keep the first error but still free the other.
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            if (err == OK) {
                err = err2;
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display. This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

// Each buffer returning to us may be the last one we were waiting for.
void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

// Transitional state: OMX_StateLoaded has been requested; we wait for the
// command completion before returning to LoadedState.
ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGE("Got flush request in IdleToLoadedState");
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::IdleToLoadedState::stateEntered() {
    ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
}

// Waits for the StateSet->Loaded command completion, then enters LoadedState.
bool ACodec::IdleToLoadedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateLoaded) {
                ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->changeState(mCodec->mLoadedState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

// State entered after OMX_CommandFlush was issued on both ports; we wait for
// per-port flush completions and for all buffers to return to us.
ACodec::FlushingState::FlushingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::FlushingState::stateEntered() {
    ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());

    // Both ports must report flush completion before we can leave this state.
    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
}

bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // Process the shutdown after the flush has finished.
            mCodec->deferMessage(msg);
            break;
        }

        case kWhatFlush:
        {
            // We're already doing this right now.
7604 handled = true; 7605 break; 7606 } 7607 7608 default: 7609 handled = BaseState::onMessageReceived(msg); 7610 break; 7611 } 7612 7613 return handled; 7614} 7615 7616bool ACodec::FlushingState::onOMXEvent( 7617 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7618 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7619 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7620 7621 switch (event) { 7622 case OMX_EventCmdComplete: 7623 { 7624 if (data1 != (OMX_U32)OMX_CommandFlush) { 7625 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7626 asString((OMX_COMMANDTYPE)data1), data1, data2); 7627 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7628 return true; 7629 } 7630 7631 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7632 if (mFlushComplete[data2]) { 7633 ALOGW("Flush already completed for %s port", 7634 data2 == kPortIndexInput ? "input" : "output"); 7635 return true; 7636 } 7637 mFlushComplete[data2] = true; 7638 7639 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7640 changeStateIfWeOwnAllBuffers(); 7641 } 7642 } else if (data2 == OMX_ALL) { 7643 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7644 ALOGW("received flush complete event for OMX_ALL before ports have been" 7645 "flushed (%d/%d)", 7646 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7647 return false; 7648 } 7649 7650 changeStateIfWeOwnAllBuffers(); 7651 } else { 7652 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7653 } 7654 7655 return true; 7656 } 7657 7658 case OMX_EventPortSettingsChanged: 7659 { 7660 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7661 msg->setInt32("type", omx_message::EVENT); 7662 msg->setInt32("generation", mCodec->mNodeGeneration); 7663 msg->setInt32("event", event); 7664 msg->setInt32("data1", data1); 7665 msg->setInt32("data2", data2); 7666 7667 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7668 
mCodec->mComponentName.c_str()); 7669 7670 mCodec->deferMessage(msg); 7671 7672 return true; 7673 } 7674 7675 default: 7676 return BaseState::onOMXEvent(event, data1, data2); 7677 } 7678 7679 return true; 7680} 7681 7682void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7683 BaseState::onOutputBufferDrained(msg); 7684 7685 changeStateIfWeOwnAllBuffers(); 7686} 7687 7688void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7689 BaseState::onInputBufferFilled(msg); 7690 7691 changeStateIfWeOwnAllBuffers(); 7692} 7693 7694void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7695 if (mFlushComplete[kPortIndexInput] 7696 && mFlushComplete[kPortIndexOutput] 7697 && mCodec->allYourBuffersAreBelongToUs()) { 7698 // We now own all buffers except possibly those still queued with 7699 // the native window for rendering. Let's get those back as well. 7700 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7701 7702 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7703 7704 sp<AMessage> notify = mCodec->mNotify->dup(); 7705 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7706 notify->post(); 7707 7708 mCodec->mPortEOS[kPortIndexInput] = 7709 mCodec->mPortEOS[kPortIndexOutput] = false; 7710 7711 mCodec->mInputEOSResult = OK; 7712 7713 if (mCodec->mSkipCutBuffer != NULL) { 7714 mCodec->mSkipCutBuffer->clear(); 7715 } 7716 7717 mCodec->changeState(mCodec->mExecutingState); 7718 } 7719} 7720 7721status_t ACodec::queryCapabilities( 7722 const AString &name, const AString &mime, bool isEncoder, 7723 sp<MediaCodecInfo::Capabilities> *caps) { 7724 (*caps).clear(); 7725 const char *role = GetComponentRole(isEncoder, mime.c_str()); 7726 if (role == NULL) { 7727 return BAD_VALUE; 7728 } 7729 7730 OMXClient client; 7731 status_t err = client.connect(); 7732 if (err != OK) { 7733 return err; 7734 } 7735 7736 sp<IOMX> omx = client.interface(); 7737 sp<CodecObserver> observer = new CodecObserver; 7738 
    sp<IOMXNode> omxNode;

    err = omx->allocateNode(name.c_str(), observer, &omxNode);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = SetComponentRole(omxNode, role);
    if (err != OK) {
        omxNode->freeNode();
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        // Enumerate supported profile/level pairs until the component stops
        // answering (or the sanity cap kMaxIndicesToCheck is reached).
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omxNode->getParameter(
                    OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omxNode->getParameter(
                    OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            OMX_U32 flexibleEquivalent;
            if (IsFlexibleColorFormat(
                    omxNode, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                // Report the flexible equivalent once, before the first
                // concrete format that maps to it.
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omxNode->getParameter(
                    (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        if (param.nProfileIndex == 0) {
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        // Probe decoder playback features by attempting to configure them.
        native_handle_t *sidebandHandle = NULL;
        if (omxNode->configureVideoTunnelMode(
                kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omxNode->storeMetaDataInBuffers(
                kPortIndexOutput, OMX_TRUE) == OK ||
            omxNode->prepareForAdaptivePlayback(
                kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
        if (omxNode->getConfig(
                (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omxNode->freeNode();
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
7885//static 7886status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7887 switch (numChannels) { 7888 case 1: 7889 map[0] = OMX_AUDIO_ChannelCF; 7890 break; 7891 case 2: 7892 map[0] = OMX_AUDIO_ChannelLF; 7893 map[1] = OMX_AUDIO_ChannelRF; 7894 break; 7895 case 3: 7896 map[0] = OMX_AUDIO_ChannelLF; 7897 map[1] = OMX_AUDIO_ChannelRF; 7898 map[2] = OMX_AUDIO_ChannelCF; 7899 break; 7900 case 4: 7901 map[0] = OMX_AUDIO_ChannelLF; 7902 map[1] = OMX_AUDIO_ChannelRF; 7903 map[2] = OMX_AUDIO_ChannelLR; 7904 map[3] = OMX_AUDIO_ChannelRR; 7905 break; 7906 case 5: 7907 map[0] = OMX_AUDIO_ChannelLF; 7908 map[1] = OMX_AUDIO_ChannelRF; 7909 map[2] = OMX_AUDIO_ChannelCF; 7910 map[3] = OMX_AUDIO_ChannelLR; 7911 map[4] = OMX_AUDIO_ChannelRR; 7912 break; 7913 case 6: 7914 map[0] = OMX_AUDIO_ChannelLF; 7915 map[1] = OMX_AUDIO_ChannelRF; 7916 map[2] = OMX_AUDIO_ChannelCF; 7917 map[3] = OMX_AUDIO_ChannelLFE; 7918 map[4] = OMX_AUDIO_ChannelLR; 7919 map[5] = OMX_AUDIO_ChannelRR; 7920 break; 7921 case 7: 7922 map[0] = OMX_AUDIO_ChannelLF; 7923 map[1] = OMX_AUDIO_ChannelRF; 7924 map[2] = OMX_AUDIO_ChannelCF; 7925 map[3] = OMX_AUDIO_ChannelLFE; 7926 map[4] = OMX_AUDIO_ChannelLR; 7927 map[5] = OMX_AUDIO_ChannelRR; 7928 map[6] = OMX_AUDIO_ChannelCS; 7929 break; 7930 case 8: 7931 map[0] = OMX_AUDIO_ChannelLF; 7932 map[1] = OMX_AUDIO_ChannelRF; 7933 map[2] = OMX_AUDIO_ChannelCF; 7934 map[3] = OMX_AUDIO_ChannelLFE; 7935 map[4] = OMX_AUDIO_ChannelLR; 7936 map[5] = OMX_AUDIO_ChannelRR; 7937 map[6] = OMX_AUDIO_ChannelLS; 7938 map[7] = OMX_AUDIO_ChannelRS; 7939 break; 7940 default: 7941 return -EINVAL; 7942 } 7943 7944 return OK; 7945} 7946 7947} // namespace android 7948