// ACodec.cpp revision a6bfb5060824f01041f956c733fec141504c280b
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "omx/OMXUtils.h" 57 58namespace android { 59 60enum { 61 kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles 62}; 63 64// OMX errors are directly mapped into 
status_t range if 65// there is no corresponding MediaError status code. 66// Use the statusFromOMXError(int32_t omxError) function. 67// 68// Currently this is a direct map. 69// See frameworks/native/include/media/openmax/OMX_Core.h 70// 71// Vendor OMX errors from 0x90000000 - 0x9000FFFF 72// Extension OMX errors from 0x8F000000 - 0x90000000 73// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 74// 75 76// returns true if err is a recognized OMX error code. 77// as OMX error is OMX_S32, this is an int32_t type 78static inline bool isOMXError(int32_t err) { 79 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 80} 81 82// converts an OMX error to a status_t 83static inline status_t statusFromOMXError(int32_t omxError) { 84 switch (omxError) { 85 case OMX_ErrorInvalidComponentName: 86 case OMX_ErrorComponentNotFound: 87 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 88 default: 89 return isOMXError(omxError) ? omxError : 0; // no translation required 90 } 91} 92 93// checks and converts status_t to a non-side-effect status_t 94static inline status_t makeNoSideEffectStatus(status_t err) { 95 switch (err) { 96 // the following errors have side effects and may come 97 // from other code modules. Remap for safety reasons. 
98 case INVALID_OPERATION: 99 case DEAD_OBJECT: 100 return UNKNOWN_ERROR; 101 default: 102 return err; 103 } 104} 105 106struct MessageList : public RefBase { 107 MessageList() { 108 } 109 virtual ~MessageList() { 110 } 111 std::list<sp<AMessage> > &getList() { return mList; } 112private: 113 std::list<sp<AMessage> > mList; 114 115 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 116}; 117 118static sp<DataConverter> getCopyConverter() { 119 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited 120 static sp<DataConverter> sCopyConverter; // zero-inited 121 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); }); 122 return sCopyConverter; 123} 124 125struct CodecObserver : public BnOMXObserver { 126 CodecObserver() {} 127 128 void setNotificationMessage(const sp<AMessage> &msg) { 129 mNotify = msg; 130 } 131 132 // from IOMXObserver 133 virtual void onMessages(const std::list<omx_message> &messages) { 134 if (messages.empty()) { 135 return; 136 } 137 138 sp<AMessage> notify = mNotify->dup(); 139 bool first = true; 140 sp<MessageList> msgList = new MessageList(); 141 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 142 it != messages.cend(); ++it) { 143 const omx_message &omx_msg = *it; 144 if (first) { 145 notify->setInt32("node", omx_msg.node); 146 first = false; 147 } 148 149 sp<AMessage> msg = new AMessage; 150 msg->setInt32("type", omx_msg.type); 151 switch (omx_msg.type) { 152 case omx_message::EVENT: 153 { 154 msg->setInt32("event", omx_msg.u.event_data.event); 155 msg->setInt32("data1", omx_msg.u.event_data.data1); 156 msg->setInt32("data2", omx_msg.u.event_data.data2); 157 break; 158 } 159 160 case omx_message::EMPTY_BUFFER_DONE: 161 { 162 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 163 msg->setInt32("fence_fd", omx_msg.fenceFd); 164 break; 165 } 166 167 case omx_message::FILL_BUFFER_DONE: 168 { 169 msg->setInt32( 170 "buffer", omx_msg.u.extended_buffer_data.buffer); 171 msg->setInt32( 172 "range_offset", 173 
omx_msg.u.extended_buffer_data.range_offset); 174 msg->setInt32( 175 "range_length", 176 omx_msg.u.extended_buffer_data.range_length); 177 msg->setInt32( 178 "flags", 179 omx_msg.u.extended_buffer_data.flags); 180 msg->setInt64( 181 "timestamp", 182 omx_msg.u.extended_buffer_data.timestamp); 183 msg->setInt32( 184 "fence_fd", omx_msg.fenceFd); 185 break; 186 } 187 188 case omx_message::FRAME_RENDERED: 189 { 190 msg->setInt64( 191 "media_time_us", omx_msg.u.render_data.timestamp); 192 msg->setInt64( 193 "system_nano", omx_msg.u.render_data.nanoTime); 194 break; 195 } 196 197 default: 198 ALOGE("Unrecognized message type: %d", omx_msg.type); 199 break; 200 } 201 msgList->getList().push_back(msg); 202 } 203 notify->setObject("messages", msgList); 204 notify->post(); 205 } 206 207protected: 208 virtual ~CodecObserver() {} 209 210private: 211 sp<AMessage> mNotify; 212 213 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); 214}; 215 216//////////////////////////////////////////////////////////////////////////////// 217 218struct ACodec::BaseState : public AState { 219 BaseState(ACodec *codec, const sp<AState> &parentState = NULL); 220 221protected: 222 enum PortMode { 223 KEEP_BUFFERS, 224 RESUBMIT_BUFFERS, 225 FREE_BUFFERS, 226 }; 227 228 ACodec *mCodec; 229 230 virtual PortMode getPortMode(OMX_U32 portIndex); 231 232 virtual bool onMessageReceived(const sp<AMessage> &msg); 233 234 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 235 236 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 237 virtual void onInputBufferFilled(const sp<AMessage> &msg); 238 239 void postFillThisBuffer(BufferInfo *info); 240 241private: 242 // Handles an OMX message. Returns true iff message was handled. 243 bool onOMXMessage(const sp<AMessage> &msg); 244 245 // Handles a list of messages. Returns true iff messages were handled. 
246 bool onOMXMessageList(const sp<AMessage> &msg); 247 248 // returns true iff this message is for this component and the component is alive 249 bool checkOMXMessage(const sp<AMessage> &msg); 250 251 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd); 252 253 bool onOMXFillBufferDone( 254 IOMX::buffer_id bufferID, 255 size_t rangeOffset, size_t rangeLength, 256 OMX_U32 flags, 257 int64_t timeUs, 258 int fenceFd); 259 260 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 261 262 void getMoreInputDataIfPossible(); 263 264 DISALLOW_EVIL_CONSTRUCTORS(BaseState); 265}; 266 267//////////////////////////////////////////////////////////////////////////////// 268 269struct ACodec::DeathNotifier : public IBinder::DeathRecipient { 270 DeathNotifier(const sp<AMessage> ¬ify) 271 : mNotify(notify) { 272 } 273 274 virtual void binderDied(const wp<IBinder> &) { 275 mNotify->post(); 276 } 277 278protected: 279 virtual ~DeathNotifier() {} 280 281private: 282 sp<AMessage> mNotify; 283 284 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); 285}; 286 287struct ACodec::UninitializedState : public ACodec::BaseState { 288 UninitializedState(ACodec *codec); 289 290protected: 291 virtual bool onMessageReceived(const sp<AMessage> &msg); 292 virtual void stateEntered(); 293 294private: 295 void onSetup(const sp<AMessage> &msg); 296 bool onAllocateComponent(const sp<AMessage> &msg); 297 298 sp<DeathNotifier> mDeathNotifier; 299 300 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); 301}; 302 303//////////////////////////////////////////////////////////////////////////////// 304 305struct ACodec::LoadedState : public ACodec::BaseState { 306 LoadedState(ACodec *codec); 307 308protected: 309 virtual bool onMessageReceived(const sp<AMessage> &msg); 310 virtual void stateEntered(); 311 312private: 313 friend struct ACodec::UninitializedState; 314 315 bool onConfigureComponent(const sp<AMessage> &msg); 316 void onCreateInputSurface(const sp<AMessage> &msg); 317 void 
onSetInputSurface(const sp<AMessage> &msg); 318 void onStart(); 319 void onShutdown(bool keepComponentAllocated); 320 321 status_t setupInputSurface(); 322 323 DISALLOW_EVIL_CONSTRUCTORS(LoadedState); 324}; 325 326//////////////////////////////////////////////////////////////////////////////// 327 328struct ACodec::LoadedToIdleState : public ACodec::BaseState { 329 LoadedToIdleState(ACodec *codec); 330 331protected: 332 virtual bool onMessageReceived(const sp<AMessage> &msg); 333 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 334 virtual void stateEntered(); 335 336private: 337 status_t allocateBuffers(); 338 339 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); 340}; 341 342//////////////////////////////////////////////////////////////////////////////// 343 344struct ACodec::IdleToExecutingState : public ACodec::BaseState { 345 IdleToExecutingState(ACodec *codec); 346 347protected: 348 virtual bool onMessageReceived(const sp<AMessage> &msg); 349 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 350 virtual void stateEntered(); 351 352private: 353 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); 354}; 355 356//////////////////////////////////////////////////////////////////////////////// 357 358struct ACodec::ExecutingState : public ACodec::BaseState { 359 ExecutingState(ACodec *codec); 360 361 void submitRegularOutputBuffers(); 362 void submitOutputMetaBuffers(); 363 void submitOutputBuffers(); 364 365 // Submit output buffers to the decoder, submit input buffers to client 366 // to fill with data. 367 void resume(); 368 369 // Returns true iff input and output buffers are in play. 
370 bool active() const { return mActive; } 371 372protected: 373 virtual PortMode getPortMode(OMX_U32 portIndex); 374 virtual bool onMessageReceived(const sp<AMessage> &msg); 375 virtual void stateEntered(); 376 377 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 378 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 379 380private: 381 bool mActive; 382 383 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); 384}; 385 386//////////////////////////////////////////////////////////////////////////////// 387 388struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { 389 OutputPortSettingsChangedState(ACodec *codec); 390 391protected: 392 virtual PortMode getPortMode(OMX_U32 portIndex); 393 virtual bool onMessageReceived(const sp<AMessage> &msg); 394 virtual void stateEntered(); 395 396 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 397 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 398 399private: 400 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); 401}; 402 403//////////////////////////////////////////////////////////////////////////////// 404 405struct ACodec::ExecutingToIdleState : public ACodec::BaseState { 406 ExecutingToIdleState(ACodec *codec); 407 408protected: 409 virtual bool onMessageReceived(const sp<AMessage> &msg); 410 virtual void stateEntered(); 411 412 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 413 414 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 415 virtual void onInputBufferFilled(const sp<AMessage> &msg); 416 417private: 418 void changeStateIfWeOwnAllBuffers(); 419 420 bool mComponentNowIdle; 421 422 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); 423}; 424 425//////////////////////////////////////////////////////////////////////////////// 426 427struct ACodec::IdleToLoadedState : public ACodec::BaseState { 428 IdleToLoadedState(ACodec *codec); 429 430protected: 431 
virtual bool onMessageReceived(const sp<AMessage> &msg); 432 virtual void stateEntered(); 433 434 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 435 436private: 437 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); 438}; 439 440//////////////////////////////////////////////////////////////////////////////// 441 442struct ACodec::FlushingState : public ACodec::BaseState { 443 FlushingState(ACodec *codec); 444 445protected: 446 virtual bool onMessageReceived(const sp<AMessage> &msg); 447 virtual void stateEntered(); 448 449 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 450 451 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 452 virtual void onInputBufferFilled(const sp<AMessage> &msg); 453 454private: 455 bool mFlushComplete[2]; 456 457 void changeStateIfWeOwnAllBuffers(); 458 459 DISALLOW_EVIL_CONSTRUCTORS(FlushingState); 460}; 461 462//////////////////////////////////////////////////////////////////////////////// 463 464void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) { 465 if (mFenceFd >= 0) { 466 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", 467 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 468 } 469 mFenceFd = fenceFd; 470 mIsReadFence = false; 471} 472 473void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { 474 if (mFenceFd >= 0) { 475 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", 476 mIsReadFence ? 
"read" : "write", mFenceFd, fenceFd, dbg); 477 } 478 mFenceFd = fenceFd; 479 mIsReadFence = true; 480} 481 482void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 483 if (mFenceFd >= 0 && mIsReadFence) { 484 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 485 } 486} 487 488void ACodec::BufferInfo::checkReadFence(const char *dbg) { 489 if (mFenceFd >= 0 && !mIsReadFence) { 490 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 491 } 492} 493 494//////////////////////////////////////////////////////////////////////////////// 495 496ACodec::ACodec() 497 : mQuirks(0), 498 mNode(0), 499 mUsingNativeWindow(false), 500 mNativeWindowUsageBits(0), 501 mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN), 502 mIsVideo(false), 503 mIsEncoder(false), 504 mFatalError(false), 505 mShutdownInProgress(false), 506 mExplicitShutdown(false), 507 mIsLegacyVP9Decoder(false), 508 mEncoderDelay(0), 509 mEncoderPadding(0), 510 mRotationDegrees(0), 511 mChannelMaskPresent(false), 512 mChannelMask(0), 513 mDequeueCounter(0), 514 mInputMetadataType(kMetadataBufferTypeInvalid), 515 mOutputMetadataType(kMetadataBufferTypeInvalid), 516 mLegacyAdaptiveExperiment(false), 517 mMetadataBuffersToSubmit(0), 518 mNumUndequeuedBuffers(0), 519 mRepeatFrameDelayUs(-1ll), 520 mMaxPtsGapUs(-1ll), 521 mMaxFps(-1), 522 mTimePerFrameUs(-1ll), 523 mTimePerCaptureUs(-1ll), 524 mCreateInputBuffersSuspended(false), 525 mTunneled(false), 526 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0), 527 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) { 528 mUninitializedState = new UninitializedState(this); 529 mLoadedState = new LoadedState(this); 530 mLoadedToIdleState = new LoadedToIdleState(this); 531 mIdleToExecutingState = new IdleToExecutingState(this); 532 mExecutingState = new ExecutingState(this); 533 534 mOutputPortSettingsChangedState = 535 new OutputPortSettingsChangedState(this); 536 537 mExecutingToIdleState = new ExecutingToIdleState(this); 538 mIdleToLoadedState = new 
IdleToLoadedState(this); 539 mFlushingState = new FlushingState(this); 540 541 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 542 mInputEOSResult = OK; 543 544 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 545 546 changeState(mUninitializedState); 547} 548 549ACodec::~ACodec() { 550} 551 552void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 553 mNotify = msg; 554} 555 556void ACodec::initiateSetup(const sp<AMessage> &msg) { 557 msg->setWhat(kWhatSetup); 558 msg->setTarget(this); 559 msg->post(); 560} 561 562void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 563 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 564 msg->setMessage("params", params); 565 msg->post(); 566} 567 568void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 569 msg->setWhat(kWhatAllocateComponent); 570 msg->setTarget(this); 571 msg->post(); 572} 573 574void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 575 msg->setWhat(kWhatConfigureComponent); 576 msg->setTarget(this); 577 msg->post(); 578} 579 580status_t ACodec::setSurface(const sp<Surface> &surface) { 581 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 582 msg->setObject("surface", surface); 583 584 sp<AMessage> response; 585 status_t err = msg->postAndAwaitResponse(&response); 586 587 if (err == OK) { 588 (void)response->findInt32("err", &err); 589 } 590 return err; 591} 592 593void ACodec::initiateCreateInputSurface() { 594 (new AMessage(kWhatCreateInputSurface, this))->post(); 595} 596 597void ACodec::initiateSetInputSurface( 598 const sp<PersistentSurface> &surface) { 599 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 600 msg->setObject("input-surface", surface); 601 msg->post(); 602} 603 604void ACodec::signalEndOfInputStream() { 605 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 606} 607 608void ACodec::initiateStart() { 609 (new AMessage(kWhatStart, this))->post(); 610} 611 612void ACodec::signalFlush() { 
613 ALOGV("[%s] signalFlush", mComponentName.c_str()); 614 (new AMessage(kWhatFlush, this))->post(); 615} 616 617void ACodec::signalResume() { 618 (new AMessage(kWhatResume, this))->post(); 619} 620 621void ACodec::initiateShutdown(bool keepComponentAllocated) { 622 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 623 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 624 msg->post(); 625 if (!keepComponentAllocated) { 626 // ensure shutdown completes in 3 seconds 627 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 628 } 629} 630 631void ACodec::signalRequestIDRFrame() { 632 (new AMessage(kWhatRequestIDRFrame, this))->post(); 633} 634 635// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 636// Some codecs may return input buffers before having them processed. 637// This causes a halt if we already signaled an EOS on the input 638// port. For now keep submitting an output buffer if there was an 639// EOS on the input port, but not yet on the output port. 
640void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 641 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 642 mMetadataBuffersToSubmit > 0) { 643 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 644 } 645} 646 647status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 648 // allow keeping unset surface 649 if (surface == NULL) { 650 if (mNativeWindow != NULL) { 651 ALOGW("cannot unset a surface"); 652 return INVALID_OPERATION; 653 } 654 return OK; 655 } 656 657 // cannot switch from bytebuffers to surface 658 if (mNativeWindow == NULL) { 659 ALOGW("component was not configured with a surface"); 660 return INVALID_OPERATION; 661 } 662 663 ANativeWindow *nativeWindow = surface.get(); 664 // if we have not yet started the codec, we can simply set the native window 665 if (mBuffers[kPortIndexInput].size() == 0) { 666 mNativeWindow = surface; 667 return OK; 668 } 669 670 // we do not support changing a tunneled surface after start 671 if (mTunneled) { 672 ALOGW("cannot change tunneled surface"); 673 return INVALID_OPERATION; 674 } 675 676 int usageBits = 0; 677 // no need to reconnect as we will not dequeue all buffers 678 status_t err = setupNativeWindowSizeFormatAndUsage( 679 nativeWindow, &usageBits, 680 !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */); 681 if (err != OK) { 682 return err; 683 } 684 685 int ignoredFlags = kVideoGrallocUsage; 686 // New output surface is not allowed to add new usage flag except ignored ones. 687 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 688 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 689 return BAD_VALUE; 690 } 691 692 // get min undequeued count. We cannot switch to a surface that has a higher 693 // undequeued count than we allocated. 
694 int minUndequeuedBuffers = 0; 695 err = nativeWindow->query( 696 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 697 &minUndequeuedBuffers); 698 if (err != 0) { 699 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 700 strerror(-err), -err); 701 return err; 702 } 703 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 704 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 705 minUndequeuedBuffers, mNumUndequeuedBuffers); 706 return BAD_VALUE; 707 } 708 709 // we cannot change the number of output buffers while OMX is running 710 // set up surface to the same count 711 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 712 ALOGV("setting up surface for %zu buffers", buffers.size()); 713 714 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 715 if (err != 0) { 716 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 717 -err); 718 return err; 719 } 720 721 // need to enable allocation when attaching 722 surface->getIGraphicBufferProducer()->allowAllocation(true); 723 724 // for meta data mode, we move dequeud buffers to the new surface. 
725 // for non-meta mode, we must move all registered buffers 726 for (size_t i = 0; i < buffers.size(); ++i) { 727 const BufferInfo &info = buffers[i]; 728 // skip undequeued buffers for meta data mode 729 if (storingMetadataInDecodedBuffers() 730 && !mLegacyAdaptiveExperiment 731 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 732 ALOGV("skipping buffer"); 733 continue; 734 } 735 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 736 737 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 738 if (err != OK) { 739 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 740 info.mGraphicBuffer->getNativeBuffer(), 741 strerror(-err), -err); 742 return err; 743 } 744 } 745 746 // cancel undequeued buffers to new surface 747 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 748 for (size_t i = 0; i < buffers.size(); ++i) { 749 BufferInfo &info = buffers.editItemAt(i); 750 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 751 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 752 err = nativeWindow->cancelBuffer( 753 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 754 info.mFenceFd = -1; 755 if (err != OK) { 756 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 757 info.mGraphicBuffer->getNativeBuffer(), 758 strerror(-err), -err); 759 return err; 760 } 761 } 762 } 763 // disallow further allocation 764 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 765 } 766 767 // push blank buffers to previous window if requested 768 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 769 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 770 } 771 772 mNativeWindow = nativeWindow; 773 mNativeWindowUsageBits = usageBits; 774 return OK; 775} 776 777status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 778 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 779 780 CHECK(mDealer[portIndex] == 
NULL); 781 CHECK(mBuffers[portIndex].isEmpty()); 782 783 status_t err; 784 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { 785 if (storingMetadataInDecodedBuffers()) { 786 err = allocateOutputMetadataBuffers(); 787 } else { 788 err = allocateOutputBuffersFromNativeWindow(); 789 } 790 } else { 791 OMX_PARAM_PORTDEFINITIONTYPE def; 792 InitOMXParams(&def); 793 def.nPortIndex = portIndex; 794 795 err = mOMX->getParameter( 796 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 797 798 if (err == OK) { 799 MetadataBufferType type = 800 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 801 size_t bufSize = def.nBufferSize; 802 if (type == kMetadataBufferTypeANWBuffer) { 803 bufSize = sizeof(VideoNativeMetadata); 804 } else if (type == kMetadataBufferTypeNativeHandleSource) { 805 bufSize = sizeof(VideoNativeHandleMetadata); 806 } 807 808 // If using gralloc or native source input metadata buffers, allocate largest 809 // metadata size as we prefer to generate native source metadata, but component 810 // may require gralloc source. For camera source, allocate at least enough 811 // size for native metadata buffers. 
812 size_t allottedSize = bufSize; 813 if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) { 814 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 815 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 816 bufSize = max(bufSize, sizeof(VideoNativeMetadata)); 817 } 818 819 size_t conversionBufferSize = 0; 820 821 sp<DataConverter> converter = mConverter[portIndex]; 822 if (converter != NULL) { 823 // here we assume sane conversions of max 4:1, so result fits in int32 824 if (portIndex == kPortIndexInput) { 825 conversionBufferSize = converter->sourceSize(bufSize); 826 } else { 827 conversionBufferSize = converter->targetSize(bufSize); 828 } 829 } 830 831 size_t alignment = MemoryDealer::getAllocationAlignment(); 832 833 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port", 834 mComponentName.c_str(), 835 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 836 portIndex == kPortIndexInput ? 
"input" : "output"); 837 838 // verify buffer sizes to avoid overflow in align() 839 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { 840 ALOGE("b/22885421"); 841 return NO_MEMORY; 842 } 843 844 // don't modify bufSize as OMX may not expect it to increase after negotiation 845 size_t alignedSize = align(bufSize, alignment); 846 size_t alignedConvSize = align(conversionBufferSize, alignment); 847 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { 848 ALOGE("b/22885421"); 849 return NO_MEMORY; 850 } 851 852 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); 853 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 854 855 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 856 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 857 if (mem == NULL || mem->pointer() == NULL) { 858 return NO_MEMORY; 859 } 860 861 BufferInfo info; 862 info.mStatus = BufferInfo::OWNED_BY_US; 863 info.mFenceFd = -1; 864 info.mRenderInfo = NULL; 865 info.mNativeHandle = NULL; 866 867 uint32_t requiresAllocateBufferBit = 868 (portIndex == kPortIndexInput) 869 ? kRequiresAllocateBufferOnInputPorts 870 : kRequiresAllocateBufferOnOutputPorts; 871 872 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 873 mem.clear(); 874 875 void *ptr = NULL; 876 sp<NativeHandle> native_handle; 877 err = mOMX->allocateSecureBuffer( 878 mNode, portIndex, bufSize, &info.mBufferID, 879 &ptr, &native_handle); 880 881 // TRICKY: this representation is unorthodox, but ACodec requires 882 // an ABuffer with a proper size to validate range offsets and lengths. 883 // Since mData is never referenced for secure input, it is used to store 884 // either the pointer to the secure buffer, or the opaque handle as on 885 // some devices ptr is actually an opaque handle, not a pointer. 
                    // TRICKY2: use native handle as the base of the ABuffer if received one,
                    // because Widevine source only receives these base addresses.
                    const native_handle_t *native_handle_ptr =
                        native_handle == NULL ? NULL : native_handle->handle();
                    info.mData = new ABuffer(
                            ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize);
                    info.mNativeHandle = native_handle;
                    info.mCodecData = info.mData;
                } else if (mQuirks & requiresAllocateBufferBit) {
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID, allottedSize);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
                }

                if (mem != NULL) {
                    info.mCodecData = new ABuffer(mem->pointer(), bufSize);
                    info.mCodecRef = mem;

                    if (type == kMetadataBufferTypeANWBuffer) {
                        // freshly allocated ANW metadata carries no pending fence
                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                    }

                    // if we require conversion, allocate conversion buffer for client use;
                    // otherwise, reuse codec buffer
                    if (mConverter[portIndex] != NULL) {
                        CHECK_GT(conversionBufferSize, (size_t)0);
                        mem = mDealer[portIndex]->allocate(conversionBufferSize);
                        if (mem == NULL|| mem->pointer() == NULL) {
                            return NO_MEMORY;
                        }
                        info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
                        info.mMemRef = mem;
                    } else {
                        info.mData = info.mCodecData;
                        info.mMemRef = info.mCodecRef;
                    }
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    // Announce the full set of buffers on this port to the client.
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Applies the output port's size/format plus rotation and gralloc usage bits
// to |nativeWindow|. |*finalUsage| receives the usage bits actually applied
// (component-requested bits, protected flag if needed, plus
// kVideoGrallocUsage). |reconnect| requests a disconnect/reconnect cycle on
// the window. Also resets the cached crop/dataspace state.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */,
        bool reconnect) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    // Reset cached window state; it will be re-applied on the new connection.
    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage,
            reconnect);
}

// Negotiates the output buffer count/size between the OMX output port and the
// native window. On success fills in |*bufferCount|, |*bufferSize| and
// |*minUndequeuedBuffers|. |preregister| is forwarded as the window's
// reconnect flag. Tunneled codecs skip window allocation entirely.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2.
try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Allocates output buffers by dequeueing graphic buffers from the native
// window and registering each one with the OMX component via
// useGraphicBuffer. The window's minimum-undequeued quota is cancelled back
// at the end; on any error all dequeued buffers are cancelled back.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    if (!storingMetadataInDecodedBuffers()) {
        // In non-metadata mode the window must actually allocate buffers now.
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mCodecData = info.mData;
        info.mGraphicBuffer = graphicBuffer;
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            if (err == 0) {
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}

// Allocates small metadata records (VideoNativeMetadata or
// VideoGrallocMetadata) for the output port; the actual graphic buffers are
// attached on the fly as they are dequeued. Under the legacy adaptive
// experiment, graphic buffers are additionally preallocated/preregistered
// here and then cancelled back to the window.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers,
            mLegacyAdaptiveExperiment /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // no pending fence on a freshly allocated metadata record
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new ABuffer(mem->pointer(), mem->size());
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        // we use useBuffer for metadata regardless of quirks
        err = mOMX->useBuffer(
                mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMX->updateGraphicBufferInMeta(
                    mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                if (err == OK) {
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Submits one output metadata buffer to the component: dequeues a graphic
// buffer from the native window, then hands the corresponding metadata
// buffer to fillBuffer. No-op once mMetadataBuffersToSubmit reaches zero.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());
    if (mMetadataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetadataBuffersToSubmit;
    info->checkWriteFence("submitOutputMetadataBuffer");
    status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
    // fence ownership was passed to fillBuffer above
    info->mFenceFd = -1;
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    return err;
}

// Waits (up to IOMX::kFenceTimeoutMs) for fence |fd| to signal; a negative
// fd is a no-op. |dbg| identifies the call site in the timeout warning.
status_t ACodec::waitForFence(int fd, const char *dbg ) {
    status_t res = OK;
    if (fd >= 0) {
        sp<Fence> fence = new Fence(fd);
        res = fence->wait(IOMX::kFenceTimeoutMs);
        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    }
    return res;
}
// static
// Human-readable name of a BufferInfo ownership state, for logging only.
const char *ACodec::_asString(BufferInfo::Status s) {
    switch (s) {
        case BufferInfo::OWNED_BY_US: return "OUR";
        case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
        case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
        case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
        case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
        default: return "?";
    }
}

// Logs the state of every buffer on |portIndex| (debugging aid).
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                i, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
    }
}

// Returns a buffer we own back to the native window. The buffer becomes
// OWNED_BY_NATIVE_WINDOW even if cancelBuffer itself fails.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    // fence ownership was passed to cancelBuffer above
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Records render-tracking info for a freshly dequeued buffer and immediately
// flushes any frames whose fences have already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Consumer-side notification that the frame with |mediaTimeUs| was rendered
// at |systemNano|; dumps the render queue if the tracker rejects it.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}

// Collects frames whose fences have signaled (up to |until|, optionally
// dropping incomplete ones), unlinks their render info from mBuffers, and
// posts a kWhatOutputFramesRendered message if there is anything to report.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}

// Dequeues one buffer from the native window and matches it back to the
// BufferInfo that owns it. Stale buffers (e.g. attached by a consumer) are
// discarded and the dequeue retried; in metadata mode a genuinely unknown
// buffer replaces the oldest buffer still owned by the window.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
             " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used. We can simply ignore this as we have already dequeued
                // this buffer properly. NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    // point the component's metadata record at the replacement graphic buffer
    mOMX->updateGraphicBufferInMeta(
            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
            oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer on |portIndex|, continuing past individual failures and
// returning the first error encountered. Clears the port's MemoryDealer
// regardless of errors.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}

// Frees output buffers that are not currently held by the component or a
// downstream consumer; those still in flight are left alone.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t err = OK;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            status_t err2 = freeBuffer(kPortIndexOutput, i);
            if (err == OK) {
                err = err2;
            }
        }
    }

    return err;
}

// Frees the buffer at slot |i| of |portIndex|: cancels it back to the native
// window if we own it, frees it from the OMX component, closes any pending
// fence, and always removes it from mBuffers (even on failure).
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMX->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}

// Finds the BufferInfo for |bufferID| on |portIndex|, optionally returning
// its slot in |*index|. Returns NULL (and logs) if the ID is unknown.
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info =
            &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    ALOGE("Could not find buffer with ID %u", bufferID);
    return NULL;
}

// Sets the component's standard OMX role derived from |mime| and |isEncoder|;
// BAD_VALUE if the MIME type has no standard role.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    const char *role = getComponentRole(isEncoder, mime);
    if (role == NULL) {
        return BAD_VALUE;
    }
    status_t err = setComponentRole(mOMX, mNode, role);
    if (err != OK) {
        ALOGW("[%s] Failed to set standard component role '%s'.",
                mComponentName.c_str(), role);
    }
    return err;
}

//static
// Maps a MIME type to the standard OMX component role string (decoder or
// encoder flavor), or NULL if the MIME type is not in the table.
const char *ACodec::getComponentRole(
        bool isEncoder, const char *mime) {
    struct MimeToRole {
        const char *mime;
        const char *decoderRole;
        const char *encoderRole;
    };

    static const MimeToRole kMimeToRole[] = {
        { MEDIA_MIMETYPE_AUDIO_MPEG,
            "audio_decoder.mp3", "audio_encoder.mp3" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
            "audio_decoder.mp1", "audio_encoder.mp1" },
        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
            "audio_decoder.mp2", "audio_encoder.mp2" },
        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
            "audio_decoder.amrnb", "audio_encoder.amrnb" },
        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
            "audio_decoder.amrwb", "audio_encoder.amrwb" },
        { MEDIA_MIMETYPE_AUDIO_AAC,
            "audio_decoder.aac", "audio_encoder.aac" },
        { MEDIA_MIMETYPE_AUDIO_VORBIS,
            "audio_decoder.vorbis", "audio_encoder.vorbis" },
        { MEDIA_MIMETYPE_AUDIO_OPUS,
            "audio_decoder.opus", "audio_encoder.opus" },
        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
        { MEDIA_MIMETYPE_VIDEO_AVC,
            "video_decoder.avc", "video_encoder.avc" },
        { MEDIA_MIMETYPE_VIDEO_HEVC,
            "video_decoder.hevc", "video_encoder.hevc" },
        { MEDIA_MIMETYPE_VIDEO_MPEG4,
            "video_decoder.mpeg4", "video_encoder.mpeg4" },
        { MEDIA_MIMETYPE_VIDEO_H263,
            "video_decoder.h263", "video_encoder.h263" },
        { MEDIA_MIMETYPE_VIDEO_VP8,
            "video_decoder.vp8", "video_encoder.vp8" },
        { MEDIA_MIMETYPE_VIDEO_VP9,
            "video_decoder.vp9", "video_encoder.vp9" },
        { MEDIA_MIMETYPE_AUDIO_RAW,
            "audio_decoder.raw", "audio_encoder.raw" },
        { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
            "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
        { MEDIA_MIMETYPE_AUDIO_FLAC,
            "audio_decoder.flac", "audio_encoder.flac" },
        { MEDIA_MIMETYPE_AUDIO_MSGSM,
            "audio_decoder.gsm", "audio_encoder.gsm" },
        { MEDIA_MIMETYPE_VIDEO_MPEG2,
            "video_decoder.mpeg2", "video_encoder.mpeg2" },
        { MEDIA_MIMETYPE_AUDIO_AC3,
            "audio_decoder.ac3", "audio_encoder.ac3" },
        { MEDIA_MIMETYPE_AUDIO_EAC3,
            "audio_decoder.eac3", "audio_encoder.eac3" },
    };

    static const size_t kNumMimeToRole =
        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);

    size_t i;
    for (i = 0; i < kNumMimeToRole; ++i) {
        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
            break;
        }
    }

    if (i == kNumMimeToRole) {
        return NULL;
    }

    return isEncoder ? kMimeToRole[i].encoderRole
                     : kMimeToRole[i].decoderRole;
}

//static
// Writes |role| into OMX_IndexParamStandardComponentRole on |node|.
status_t ACodec::setComponentRole(
        const sp<IOMX> &omx, IOMX::node_id node, const char *role) {
    OMX_PARAM_COMPONENTROLETYPE roleParams;
    InitOMXParams(&roleParams);

    strncpy((char *)roleParams.cRole,
            role, OMX_MAX_STRINGNAME_SIZE - 1);

    // guarantee NUL-termination; strncpy does not when it truncates
    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';

    return omx->setParameter(
            node, OMX_IndexParamStandardComponentRole,
            &roleParams, sizeof(roleParams));
}

// Configures the codec from |msg| for |mime| (continues past this chunk).
status_t ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    int32_t encoder;
    if (!msg->findInt32("encoder", &encoder)) {
        encoder = false;
    }

    sp<AMessage> inputFormat = new AMessage;
    sp<AMessage> outputFormat = new AMessage;
    mConfigFormat = msg;

    mIsEncoder = encoder;

    mInputMetadataType = kMetadataBufferTypeInvalid;
    mOutputMetadataType = kMetadataBufferTypeInvalid;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);

    if (err != OK) {
        return err;
    }

    int32_t bitRate = 0;
    // FLAC encoder doesn't need a bitrate, other encoders do
    if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
            && !msg->findInt32("bitrate", &bitRate)) {
        return INVALID_OPERATION;
    }

    // propagate bitrate to the output so that the muxer has it
    if (encoder && msg->findInt32("bitrate", &bitRate)) {
        // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
        // average bitrate. We've been setting both bitrate and max-bitrate to this same value.
1747 outputFormat->setInt32("bitrate", bitRate); 1748 outputFormat->setInt32("max-bitrate", bitRate); 1749 } 1750 1751 int32_t storeMeta; 1752 if (encoder 1753 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1754 && storeMeta != kMetadataBufferTypeInvalid) { 1755 mInputMetadataType = (MetadataBufferType)storeMeta; 1756 err = mOMX->storeMetaDataInBuffers( 1757 mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1758 if (err != OK) { 1759 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1760 mComponentName.c_str(), err); 1761 1762 return err; 1763 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1764 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1765 // IOMX translates ANWBuffers to gralloc source already. 1766 mInputMetadataType = (MetadataBufferType)storeMeta; 1767 } 1768 1769 uint32_t usageBits; 1770 if (mOMX->getParameter( 1771 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1772 &usageBits, sizeof(usageBits)) == OK) { 1773 inputFormat->setInt32( 1774 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1775 } 1776 } 1777 1778 int32_t prependSPSPPS = 0; 1779 if (encoder 1780 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1781 && prependSPSPPS != 0) { 1782 OMX_INDEXTYPE index; 1783 err = mOMX->getExtensionIndex( 1784 mNode, 1785 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1786 &index); 1787 1788 if (err == OK) { 1789 PrependSPSPPSToIDRFramesParams params; 1790 InitOMXParams(¶ms); 1791 params.bEnable = OMX_TRUE; 1792 1793 err = mOMX->setParameter( 1794 mNode, index, ¶ms, sizeof(params)); 1795 } 1796 1797 if (err != OK) { 1798 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1799 "IDR frames. 
(err %d)", err); 1800 1801 return err; 1802 } 1803 } 1804 1805 // Only enable metadata mode on encoder output if encoder can prepend 1806 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1807 // opaque handle, to which we don't have access. 1808 int32_t video = !strncasecmp(mime, "video/", 6); 1809 mIsVideo = video; 1810 if (encoder && video) { 1811 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1812 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1813 && storeMeta != 0); 1814 1815 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1816 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1817 if (err != OK) { 1818 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1819 mComponentName.c_str(), err); 1820 } 1821 1822 if (!msg->findInt64( 1823 "repeat-previous-frame-after", 1824 &mRepeatFrameDelayUs)) { 1825 mRepeatFrameDelayUs = -1ll; 1826 } 1827 1828 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1829 mMaxPtsGapUs = -1ll; 1830 } 1831 1832 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1833 mMaxFps = -1; 1834 } 1835 1836 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1837 mTimePerCaptureUs = -1ll; 1838 } 1839 1840 if (!msg->findInt32( 1841 "create-input-buffers-suspended", 1842 (int32_t*)&mCreateInputBuffersSuspended)) { 1843 mCreateInputBuffersSuspended = false; 1844 } 1845 } 1846 1847 // NOTE: we only use native window for video decoders 1848 sp<RefBase> obj; 1849 bool haveNativeWindow = msg->findObject("native-window", &obj) 1850 && obj != NULL && video && !encoder; 1851 mUsingNativeWindow = haveNativeWindow; 1852 mLegacyAdaptiveExperiment = false; 1853 if (video && !encoder) { 1854 inputFormat->setInt32("adaptive-playback", false); 1855 1856 int32_t usageProtected; 1857 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1858 if (!haveNativeWindow) { 1859 ALOGE("protected output buffers must be sent to an 
ANativeWindow"); 1860 return PERMISSION_DENIED; 1861 } 1862 mFlags |= kFlagIsGrallocUsageProtected; 1863 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1864 } 1865 1866 if (mFlags & kFlagIsSecure) { 1867 // use native_handles for secure input buffers 1868 err = mOMX->enableNativeBuffers( 1869 mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1870 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1871 err = OK; // ignore error for now 1872 } 1873 } 1874 if (haveNativeWindow) { 1875 sp<ANativeWindow> nativeWindow = 1876 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1877 1878 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1879 int32_t autoFrc; 1880 if (msg->findInt32("auto-frc", &autoFrc)) { 1881 bool enabled = autoFrc; 1882 OMX_CONFIG_BOOLEANTYPE config; 1883 InitOMXParams(&config); 1884 config.bEnabled = (OMX_BOOL)enabled; 1885 status_t temp = mOMX->setConfig( 1886 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1887 &config, sizeof(config)); 1888 if (temp == OK) { 1889 outputFormat->setInt32("auto-frc", enabled); 1890 } else if (enabled) { 1891 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1892 } 1893 } 1894 // END of temporary support for automatic FRC 1895 1896 int32_t tunneled; 1897 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1898 tunneled != 0) { 1899 ALOGI("Configuring TUNNELED video playback."); 1900 mTunneled = true; 1901 1902 int32_t audioHwSync = 0; 1903 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1904 ALOGW("No Audio HW Sync provided for video tunnel"); 1905 } 1906 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1907 if (err != OK) { 1908 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1909 audioHwSync, nativeWindow.get()); 1910 return err; 1911 } 1912 1913 int32_t maxWidth = 0, maxHeight = 0; 1914 if (msg->findInt32("max-width", &maxWidth) && 1915 msg->findInt32("max-height", &maxHeight)) { 1916 
1917 err = mOMX->prepareForAdaptivePlayback( 1918 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1919 if (err != OK) { 1920 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1921 mComponentName.c_str(), err); 1922 // allow failure 1923 err = OK; 1924 } else { 1925 inputFormat->setInt32("max-width", maxWidth); 1926 inputFormat->setInt32("max-height", maxHeight); 1927 inputFormat->setInt32("adaptive-playback", true); 1928 } 1929 } 1930 } else { 1931 ALOGV("Configuring CPU controlled video playback."); 1932 mTunneled = false; 1933 1934 // Explicity reset the sideband handle of the window for 1935 // non-tunneled video in case the window was previously used 1936 // for a tunneled video playback. 1937 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1938 if (err != OK) { 1939 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1940 return err; 1941 } 1942 1943 // Always try to enable dynamic output buffers on native surface 1944 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1945 err = mOMX->storeMetaDataInBuffers( 1946 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1947 if (err != OK) { 1948 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1949 mComponentName.c_str(), err); 1950 1951 // if adaptive playback has been requested, try JB fallback 1952 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1953 // LARGE MEMORY REQUIREMENT 1954 1955 // we will not do adaptive playback on software accessed 1956 // surfaces as they never had to respond to changes in the 1957 // crop window, and we don't trust that they will be able to. 
1958 int usageBits = 0; 1959 bool canDoAdaptivePlayback; 1960 1961 if (nativeWindow->query( 1962 nativeWindow.get(), 1963 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1964 &usageBits) != OK) { 1965 canDoAdaptivePlayback = false; 1966 } else { 1967 canDoAdaptivePlayback = 1968 (usageBits & 1969 (GRALLOC_USAGE_SW_READ_MASK | 1970 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1971 } 1972 1973 int32_t maxWidth = 0, maxHeight = 0; 1974 if (canDoAdaptivePlayback && 1975 msg->findInt32("max-width", &maxWidth) && 1976 msg->findInt32("max-height", &maxHeight)) { 1977 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1978 mComponentName.c_str(), maxWidth, maxHeight); 1979 1980 err = mOMX->prepareForAdaptivePlayback( 1981 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1982 maxHeight); 1983 ALOGW_IF(err != OK, 1984 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1985 mComponentName.c_str(), err); 1986 1987 if (err == OK) { 1988 inputFormat->setInt32("max-width", maxWidth); 1989 inputFormat->setInt32("max-height", maxHeight); 1990 inputFormat->setInt32("adaptive-playback", true); 1991 } 1992 } 1993 // allow failure 1994 err = OK; 1995 } else { 1996 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1997 mComponentName.c_str()); 1998 CHECK(storingMetadataInDecodedBuffers()); 1999 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 2000 "legacy-adaptive", !msg->contains("no-experiments")); 2001 2002 inputFormat->setInt32("adaptive-playback", true); 2003 } 2004 2005 int32_t push; 2006 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 2007 && push != 0) { 2008 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 2009 } 2010 } 2011 2012 int32_t rotationDegrees; 2013 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 2014 mRotationDegrees = rotationDegrees; 2015 } else { 2016 mRotationDegrees = 0; 2017 } 2018 } 2019 2020 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 2021 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 2022 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 2023 2024 if (video) { 2025 // determine need for software renderer 2026 bool usingSwRenderer = false; 2027 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 2028 usingSwRenderer = true; 2029 haveNativeWindow = false; 2030 } 2031 2032 if (encoder) { 2033 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2034 } else { 2035 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2036 } 2037 2038 if (err != OK) { 2039 return err; 2040 } 2041 2042 if (haveNativeWindow) { 2043 mNativeWindow = static_cast<Surface *>(obj.get()); 2044 } 2045 2046 // initialize native window now to get actual output format 2047 // TODO: this is needed for some encoders even though they don't use native window 2048 err = initNativeWindow(); 2049 if (err != OK) { 2050 return err; 2051 } 2052 2053 // fallback for devices that do not handle flex-YUV for native buffers 2054 if (haveNativeWindow) { 2055 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2056 if (msg->findInt32("color-format", &requestedColorFormat) && 2057 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2058 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2059 if (err != OK) { 2060 return err; 2061 } 2062 int32_t colorFormat = OMX_COLOR_FormatUnused; 2063 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2064 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2065 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2066 return BAD_VALUE; 2067 } 2068 ALOGD("[%s] Requested output format %#x and got %#x.", 2069 mComponentName.c_str(), requestedColorFormat, colorFormat); 2070 if (!isFlexibleColorFormat( 2071 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2072 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2073 // device did not handle flex-YUV request for native window, fall back 2074 // to SW renderer 2075 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2076 mNativeWindow.clear(); 2077 mNativeWindowUsageBits = 0; 2078 haveNativeWindow = false; 2079 usingSwRenderer = true; 2080 if (storingMetadataInDecodedBuffers()) { 2081 err = mOMX->storeMetaDataInBuffers( 2082 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2083 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2084 // TODO: implement adaptive-playback support for bytebuffer mode. 2085 // This is done by SW codecs, but most HW codecs don't support it. 2086 inputFormat->setInt32("adaptive-playback", false); 2087 } 2088 if (err == OK) { 2089 err = mOMX->enableNativeBuffers( 2090 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2091 } 2092 if (mFlags & kFlagIsGrallocUsageProtected) { 2093 // fallback is not supported for protected playback 2094 err = PERMISSION_DENIED; 2095 } else if (err == OK) { 2096 err = setupVideoDecoder( 2097 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2098 } 2099 } 2100 } 2101 } 2102 2103 if (usingSwRenderer) { 2104 outputFormat->setInt32("using-sw-renderer", 1); 2105 } 2106 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2107 int32_t numChannels, sampleRate; 2108 if (!msg->findInt32("channel-count", &numChannels) 2109 || !msg->findInt32("sample-rate", &sampleRate)) { 2110 // Since we did not always check for these, leave them optional 2111 // and have the decoder figure it all out. 2112 err = OK; 2113 } else { 2114 err = setupRawAudioFormat( 2115 encoder ? 
kPortIndexInput : kPortIndexOutput, 2116 sampleRate, 2117 numChannels); 2118 } 2119 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2120 int32_t numChannels, sampleRate; 2121 if (!msg->findInt32("channel-count", &numChannels) 2122 || !msg->findInt32("sample-rate", &sampleRate)) { 2123 err = INVALID_OPERATION; 2124 } else { 2125 int32_t isADTS, aacProfile; 2126 int32_t sbrMode; 2127 int32_t maxOutputChannelCount; 2128 int32_t pcmLimiterEnable; 2129 drcParams_t drc; 2130 if (!msg->findInt32("is-adts", &isADTS)) { 2131 isADTS = 0; 2132 } 2133 if (!msg->findInt32("aac-profile", &aacProfile)) { 2134 aacProfile = OMX_AUDIO_AACObjectNull; 2135 } 2136 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2137 sbrMode = -1; 2138 } 2139 2140 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2141 maxOutputChannelCount = -1; 2142 } 2143 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2144 // value is unknown 2145 pcmLimiterEnable = -1; 2146 } 2147 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2148 // value is unknown 2149 drc.encodedTargetLevel = -1; 2150 } 2151 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2152 // value is unknown 2153 drc.drcCut = -1; 2154 } 2155 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2156 // value is unknown 2157 drc.drcBoost = -1; 2158 } 2159 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2160 // value is unknown 2161 drc.heavyCompression = -1; 2162 } 2163 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2164 // value is unknown 2165 drc.targetRefLevel = -1; 2166 } 2167 2168 err = setupAACCodec( 2169 encoder, numChannels, sampleRate, bitRate, aacProfile, 2170 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2171 pcmLimiterEnable); 2172 } 2173 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2174 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2175 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2176 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2177 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2178 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2179 // These are PCM-like formats with a fixed sample rate but 2180 // a variable number of channels. 2181 2182 int32_t numChannels; 2183 if (!msg->findInt32("channel-count", &numChannels)) { 2184 err = INVALID_OPERATION; 2185 } else { 2186 int32_t sampleRate; 2187 if (!msg->findInt32("sample-rate", &sampleRate)) { 2188 sampleRate = 8000; 2189 } 2190 err = setupG711Codec(encoder, sampleRate, numChannels); 2191 } 2192 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2193 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2194 if (encoder && 2195 (!msg->findInt32("channel-count", &numChannels) 2196 || !msg->findInt32("sample-rate", &sampleRate))) { 2197 ALOGE("missing channel count or sample rate for FLAC encoder"); 2198 err = INVALID_OPERATION; 2199 } else { 2200 if (encoder) { 2201 if (!msg->findInt32( 2202 "complexity", &compressionLevel) && 2203 !msg->findInt32( 2204 "flac-compression-level", &compressionLevel)) { 2205 compressionLevel = 5; // default FLAC compression level 2206 } else if (compressionLevel < 0) { 2207 ALOGW("compression level %d outside [0..8] range, " 2208 "using 0", 2209 compressionLevel); 2210 compressionLevel = 0; 2211 } else if (compressionLevel > 8) { 2212 ALOGW("compression level %d outside [0..8] range, " 2213 "using 8", 2214 compressionLevel); 2215 compressionLevel = 8; 2216 } 2217 } 2218 err = setupFlacCodec( 2219 encoder, numChannels, sampleRate, compressionLevel); 2220 } 2221 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2222 int32_t numChannels, sampleRate; 2223 if (encoder 2224 || !msg->findInt32("channel-count", &numChannels) 2225 || !msg->findInt32("sample-rate", &sampleRate)) { 2226 err = INVALID_OPERATION; 2227 } else { 2228 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2229 } 2230 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2231 int32_t numChannels; 2232 int32_t sampleRate; 2233 if (!msg->findInt32("channel-count", &numChannels) 2234 || !msg->findInt32("sample-rate", &sampleRate)) { 2235 err = INVALID_OPERATION; 2236 } else { 2237 err = setupAC3Codec(encoder, numChannels, sampleRate); 2238 } 2239 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2240 int32_t numChannels; 2241 int32_t sampleRate; 2242 if (!msg->findInt32("channel-count", &numChannels) 2243 || !msg->findInt32("sample-rate", &sampleRate)) { 2244 err = INVALID_OPERATION; 2245 } else { 2246 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2247 } 2248 } 2249 2250 if (err != OK) { 2251 return err; 2252 } 2253 2254 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2255 mEncoderDelay = 0; 2256 } 2257 2258 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2259 mEncoderPadding = 0; 2260 } 2261 2262 if (msg->findInt32("channel-mask", &mChannelMask)) { 2263 mChannelMaskPresent = true; 2264 } else { 2265 mChannelMaskPresent = false; 2266 } 2267 2268 int32_t maxInputSize; 2269 if (msg->findInt32("max-input-size", &maxInputSize)) { 2270 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2271 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2272 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2273 } 2274 2275 int32_t priority; 2276 if (msg->findInt32("priority", &priority)) { 2277 err = setPriority(priority); 2278 } 2279 2280 int32_t rateInt = -1; 2281 float rateFloat = -1; 2282 if (!msg->findFloat("operating-rate", &rateFloat)) { 2283 msg->findInt32("operating-rate", &rateInt); 2284 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
    }
    if (rateFloat > 0) {
        err = setOperatingRate(rateFloat, video);
    }

    // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
    mBaseOutputFormat = outputFormat;
    // trigger a kWhatOutputFormatChanged msg on first buffer
    mLastOutputFormat.clear();

    err = getPortFormat(kPortIndexInput, inputFormat);
    if (err == OK) {
        err = getPortFormat(kPortIndexOutput, outputFormat);
        if (err == OK) {
            mInputFormat = inputFormat;
            mOutputFormat = outputFormat;
        }
    }

    // create data converters if needed
    if (!video && err == OK) {
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        if (encoder) {
            // Encoder consumes client PCM on input: convert client encoding -> codec encoding.
            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
            if (mConverter[kPortIndexInput] != NULL) {
                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        } else {
            // Decoder produces PCM on output: convert codec encoding -> client encoding.
            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
            if (mConverter[kPortIndexOutput] != NULL) {
                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        }
    }

    return err;
}

// Forwards the client's priority hint to the component via
// OMX_IndexConfigPriority. Negative priorities are rejected; a component
// that does not support the index is only logged (best-effort, never fatal).
status_t ACodec::setPriority(int32_t priority) {
    if (priority < 0) {
        return BAD_VALUE;
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = (OMX_U32)priority;
    status_t temp = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
            &config, sizeof(config));
    if (temp != OK) {
        // best-effort: unsupported priority config is not an error
        ALOGI("codec does not support config priority (err %d)", temp);
    }
    return OK;
}

// Communicates the "operating-rate" hint to the component via
// OMX_IndexConfigOperatingRate. Video rates are sent in Q16 fixed point
// (frames per second), audio rates as plain Hz. Unsupported config is
// logged and ignored (returns OK); only out-of-range rates fail.
status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
    if (rateFloat < 0) {
        return BAD_VALUE;
    }
    OMX_U32 rate;
    if (isVideo) {
        // Q16: must fit in 16.16 fixed point, hence the 65535 fps cap.
        if (rateFloat > 65535) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
    } else {
        // NOTE(review): float > UINT_MAX compares against UINT_MAX rounded
        // to float; values just below UINT_MAX may still overflow the cast.
        if (rateFloat > UINT_MAX) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat);
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = rate;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
            &config, sizeof(config));
    if (err != OK) {
        // best-effort: unsupported operating rate is not an error
        ALOGI("codec does not support config operating rate (err %d)", err);
    }
    return OK;
}

// Reads the encoder's intra-refresh period (in frames) from the output port.
// Tries the Android-specific config index first; falls back to the standard
// OMX cyclic intra-refresh parameter, converting its macroblock count into a
// frame period from the port's frame dimensions. On failure reports 0
// (i.e. "no intra refresh") rather than propagating most errors.
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);

    return OK;
}

// Sets the encoder's intra-refresh period (in frames). Tries the Android
// config index first; if that fails and we are still in the configure stage
// (inConfigure), falls back to the standard cyclic intra-refresh parameter,
// converting the frame period into a per-frame macroblock count using the
// port's frame dimensions. 0 disables intra refresh.
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
    if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}

// Grows the port's nBufferSize to at least `size` if it is currently
// smaller. Reads back the port definition afterwards to verify the component
// actually honored the request; returns FAILED_TRANSACTION if it did not.
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // already large enough — nothing to do
    if (def.nBufferSize >= size) {
        return OK;
    }

    def.nBufferSize = size;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // read back: components may silently clamp or ignore the new size
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize < size) {
        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
        return FAILED_TRANSACTION;
    }

    return OK;
}

// Enumerates the audio port's supported formats and selects the entry whose
// eEncoding matches desiredFormat. Gives up after kMaxIndicesToCheck entries
// (some components enumerate forever) and returns ERROR_UNSUPPORTED.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eEncoding), format.eEncoding);
            return ERROR_UNSUPPORTED;
        }
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}

// Configures an AAC encoder or decoder.
// Encoder path: PCM on the input port, OMX_AUDIO_PARAM_AACPROFILETYPE
// (profile, bitrate, SBR tools) on the output port. ADTS output from an
// encoder is not supported (-EINVAL).
// Decoder path: AAC profile (stream format MP4FF vs MP4ADTS) on the input
// port, plus optional Android DRC/presentation parameters which must not
// fail the configuration if the component rejects them.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        return -EINVAL;
    }

    // the PCM side of the codec: input for encoders, output for decoders
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // sbrMode: 0 = off, 1 = single-rate, 2 = dual-rate, -1 = codec decides
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // decoder path
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // DRC / presentation: -1 fields mean "unknown, keep component default"
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    return res;
}

// Configures an AC3 decoder: raw PCM on the output port, then channel count
// and sample rate via the Android AC3 parameter on the input port.
// Encoding is not supported.
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("AC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
            &def,
            sizeof(def));
}

// Configures an E-AC3 decoder; mirrors setupAC3Codec using the Android
// EAC3 parameter. Encoding is not supported.
status_t ACodec::setupEAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        ALOGW("EAC3 encoding is not supported.");
        return INVALID_OPERATION;
    }

    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));

    if (err != OK) {
        return err;
    }

    def.nChannels = numChannels;
    def.nSampleRate = sampleRate;

    return mOMX->setParameter(
            mNode,
            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
            &def,
            sizeof(def));
}

// Maps a requested bitrate (bits per second) to the nearest AMR band mode
// at or above it: the nine AMR-WB modes (6.6k..23.85k) or the eight AMR-NB
// modes (4.75k..12.2k). Rates above the top mode saturate to the top mode.
static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
        bool isAMRWB, int32_t bps) {
    if (isAMRWB) {
        if (bps <= 6600) {
            return OMX_AUDIO_AMRBandModeWB0;
        } else if (bps <= 8850) {
            return OMX_AUDIO_AMRBandModeWB1;
        } else if (bps <= 12650) {
            return OMX_AUDIO_AMRBandModeWB2;
        } else if (bps <= 14250) {
            return OMX_AUDIO_AMRBandModeWB3;
        } else if (bps <= 15850) {
            return OMX_AUDIO_AMRBandModeWB4;
        } else if (bps <= 18250) {
            return OMX_AUDIO_AMRBandModeWB5;
        } else if (bps <= 19850) {
            return OMX_AUDIO_AMRBandModeWB6;
        } else if (bps <= 23050) {
            return OMX_AUDIO_AMRBandModeWB7;
        }

        // 23850 bps
        return OMX_AUDIO_AMRBandModeWB8;
    } else {  // AMRNB
        if (bps <= 4750) {
            return OMX_AUDIO_AMRBandModeNB0;
        } else if (bps <= 5150) {
            return OMX_AUDIO_AMRBandModeNB1;
        } else if (bps <= 5900) {
            return OMX_AUDIO_AMRBandModeNB2;
        } else if (bps <= 6700) {
            return OMX_AUDIO_AMRBandModeNB3;
        } else if (bps <= 7400) {
            return OMX_AUDIO_AMRBandModeNB4;
        } else if (bps <= 7950) {
            return OMX_AUDIO_AMRBandModeNB5;
        } else if (bps <= 10200) {
            return OMX_AUDIO_AMRBandModeNB6;
        }

        // 12200 bps
        return OMX_AUDIO_AMRBandModeNB7;
    }
}

// Configures an AMR (NB or WB) encoder or decoder: selects the band mode
// from the requested bitrate on the compressed port, then sets up the raw
// PCM side (mono, 16 kHz for WB / 8 kHz for NB — fixed by the AMR spec).
status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
    OMX_AUDIO_PARAM_AMRTYPE def;
    InitOMXParams(&def);
    // compressed side: output port for encoders, input port for decoders
    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;

    status_t err =
        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            isWAMR ? 16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// Configures a G.711 (a-law/mu-law) decoder; only the raw PCM input side
// needs setting up. Encoding is not supported.
status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
    if (encoder) {
        return INVALID_OPERATION;
    }

    return setupRawAudioFormat(
            kPortIndexInput, sampleRate, numChannels);
}

// Configures a FLAC encoder or decoder. For encoders, applies the requested
// compression level (caller has already clamped it to [0..8]) on the output
// port; then sets up the raw PCM side for both directions.
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures the PCM side of an audio port: forces the port's encoding to
// OMX_AUDIO_CodingPCM, then sets channel count, sample rate, and sample
// encoding (8-bit unsigned / 16-bit signed / 32-bit float). If the component
// rejects a non-16-bit encoding, retries with 16-bit signed; callers verify
// the final encoding via readback since components may ignore these fields.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // NOTE(review): OMX_ErrorNone (== 0 == OK) is returned here, so an
        // unmappable channel count silently skips the PCM setParameter below
        // and reports success — confirm this is intentional.
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err != OK && encoding != kAudioEncodingPcm16bit) {
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Puts the output port into tunneled (sideband) mode, associating it with
// the given audio HW sync id, and attaches the resulting sideband handle to
// the native window. Both steps must succeed for tunneled playback.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Enumerates the video port's format list and selects the entry matching the
// requested compression format (compressed ports) or color format (raw
// ports). A flexible color format request is substituted with the codec's
// concrete equivalent. Contains workarounds for broken TI components and
// caps the enumeration at kMaxIndicesToCheck entries.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
                && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
//
// Suggested preference order:
// - optimal format for texture rendering (mediaplayer behavior)
// - optimal SW readable & texture renderable format (flex-YUV support)
// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
// - legacy "usable" standard formats
//
// For legacy support, we prefer a standard format, but will settle for a SW readable
// flex-YUV format.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    // Enumerate the component's output formats in its order of preference.
    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            return OMX_ErrorBadParameter;
        }
        if (!getLegacyFlexibleFormat) {
            // take the component's first (preferred) format as-is
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}

// Bidirectional mapping between video MIME types and OMX coding types.
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
    { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision },
};

// Looks up the OMX coding type for |mime| (case-insensitive). On failure sets
// *codingType to OMX_VIDEO_CodingUnused and returns ERROR_UNSUPPORTED.
static status_t GetVideoCodingTypeFromMime(
        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
            return OK;
        }
    }

    *codingType = OMX_VIDEO_CodingUnused;

    return ERROR_UNSUPPORTED;
}

// Inverse of GetVideoCodingTypeFromMime. On failure clears *mime and returns
// ERROR_UNSUPPORTED.
static status_t GetMimeTypeForVideoCoding(
        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
    for (size_t i = 0;
         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
         ++i) {
        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
            *mime = kVideoCodingMapEntry[i].mMime;
            return OK;
        }
    }

    mime->clear();

    return ERROR_UNSUPPORTED;
}

// Requests |bufferNum| actual buffers on |portIndex| via the port definition.
// A component may reject the request; that is logged but deliberately NOT
// treated as an error — this function always returns OK unless the initial
// getParameter fails.
status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;
    status_t err;
    ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }
    def.nBufferCountActual = bufferNum;
    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        // Component could reject this request.
        ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(),
                portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    }
    return OK;
}

// Full decoder port configuration: input coding type, output color format
// (honoring an app-requested format, falling back to the component's
// preference), optional buffer-count overrides, frame size/rate on both
// ports, and finally color aspects and HDR static info (both optional).
status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
        bool usingSwRenderer, sp<AMessage> &outputFormat) {
    int32_t width, height;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CODINGTYPE compressionFormat;
    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    if (compressionFormat == OMX_VIDEO_CodingVP9) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexInput;
        // Check if VP9 decoder advertises supported profiles.
        params.nProfileIndex = 0;
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
                sizeof(params));
        // A decoder that cannot enumerate profiles is treated as legacy.
        mIsLegacyVP9Decoder = err != OK;
    }

    err = setVideoPortFormatType(
            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        return err;
    }

    int32_t tmp;
    if (msg->findInt32("color-format", &tmp)) {
        OMX_COLOR_FORMATTYPE colorFormat =
            static_cast<OMX_COLOR_FORMATTYPE>(tmp);
        err = setVideoPortFormatType(
                kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
        if (err != OK) {
            // fall back to the component's preferred format
            ALOGW("[%s] does not support color format %d",
                  mComponentName.c_str(), colorFormat);
            err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
        }
    } else {
        err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
    }

    if (err != OK) {
        return err;
    }

    // Set the component input buffer number to be |tmp|. If succeed,
    // component will set input port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-input-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexInput, tmp);
        if (err != OK)
            return err;
    }

    // Set the component output buffer number to be |tmp|. If succeed,
    // component will set output port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-output-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexOutput, tmp);
        if (err != OK)
            return err;
    }

    int32_t frameRateInt;
    float frameRateFloat;
    if (!msg->findFloat("frame-rate", &frameRateFloat)) {
        if (!msg->findInt32("frame-rate", &frameRateInt)) {
            frameRateInt = -1;
        }
        frameRateFloat = (float)frameRateInt;
    }

    err = setVideoFormatOnPort(
            kPortIndexInput, width, height, compressionFormat, frameRateFloat);

    if (err != OK) {
        return err;
    }

    err = setVideoFormatOnPort(
            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);

    if (err != OK) {
        return err;
    }

    err = setColorAspectsForVideoDecoder(
            width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }
    return err;
}

// Caches the OMX extension index for the describeColorAspects extension.
// mDescribeColorAspectsIndex stays 0 when the component lacks support.
status_t ACodec::initDescribeColorAspectsIndex() {
    status_t err = mOMX->getExtensionIndex(
            mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
    if (err != OK) {
        mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes |params| color aspects to the codec (if it supports the extension);
// when |verify| is set, reads them back so |params| reflects what the codec
// actually accepted. Returns ERROR_UNSUPPORTED when the extension is absent.
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));

    if (verify && err == OK) {
        err = getCodecColorAspects(params);
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
            "[%s] setting color aspects failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Decoder path: derives color aspects from the configure() format (defaulting
// by frame size when a surface is used), publishes them into |outputFormat|,
// and communicates them to the codec.
status_t ACodec::setColorAspectsForVideoDecoder(
        int32_t width, int32_t height, bool usingNativeWindow,
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (usingNativeWindow) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
        // The default aspects will be set back to the output format during the
        // getFormat phase of configure(). Set non-Unspecified values back into the
        // format, in case component does not support this enumeration.
        setColorAspectsIntoFormat(params.sAspects, outputFormat);
    }

    (void)initDescribeColorAspectsIndex();

    // communicate color aspects to codec
    return setCodecColorAspects(params);
}

// Reads the codec's current color aspects into |params|. Failure is only
// warned about when the codec advertises support and the call was not a
// dataspace request / dataspace-changed notification.
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}

// Copies the encoder's input-port color aspects into |format| — but only when
// the codec actually supports the color-aspects extension.
status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    status_t err = getCodecColorAspects(params);
    if (err == OK) {
        // we only set encoder input aspects if codec supports them
        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
    }
    return err;
}

// Determines the dataspace for |params|' aspects: optionally asks the codec
// for guidance first (|tryCodec|), otherwise (or on fallback) derives a
// legacy-compatible dataspace from the aspects themselves.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
          "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}


// Decoder path: pushes the configure()-requested aspects to the codec (with
// readback), records the resulting aspects in |outputFormat|, and — when
// |dataSpace| is non-null — resolves the dataspace to use for rendering.
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}

// initial video encoder setup for bytebuffer mode
3435status_t ACodec::setColorAspectsForVideoEncoder( 3436 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3437 // copy config to output format as this is not exposed via getFormat 3438 copyColorConfig(configFormat, outputFormat); 3439 3440 DescribeColorAspectsParams params; 3441 InitOMXParams(¶ms); 3442 params.nPortIndex = kPortIndexInput; 3443 getColorAspectsFromFormat(configFormat, params.sAspects); 3444 3445 (void)initDescribeColorAspectsIndex(); 3446 3447 int32_t usingRecorder; 3448 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3449 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3450 int32_t width, height; 3451 if (configFormat->findInt32("width", &width) 3452 && configFormat->findInt32("height", &height)) { 3453 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3454 status_t err = getDataSpace( 3455 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3456 if (err != OK) { 3457 return err; 3458 } 3459 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3460 } 3461 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3462 } 3463 3464 // communicate color aspects to codec, but do not allow change of the platform aspects 3465 ColorAspects origAspects = params.sAspects; 3466 for (int triesLeft = 2; --triesLeft >= 0; ) { 3467 status_t err = setCodecColorAspects(params, true /* readBack */); 3468 if (err != OK 3469 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3470 params.sAspects, origAspects, true /* usePlatformAspects */)) { 3471 return err; 3472 } 3473 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3474 mComponentName.c_str()); 3475 } 3476 return OK; 3477} 3478 3479status_t ACodec::setHDRStaticInfoForVideoCodec( 3480 OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3481 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3482 3483 
DescribeHDRStaticInfoParams params; 3484 InitOMXParams(¶ms); 3485 params.nPortIndex = portIndex; 3486 3487 HDRStaticInfo *info = ¶ms.sInfo; 3488 if (getHDRStaticInfoFromFormat(configFormat, info)) { 3489 setHDRStaticInfoIntoFormat(params.sInfo, outputFormat); 3490 } 3491 3492 (void)initDescribeHDRStaticInfoIndex(); 3493 3494 // communicate HDR static Info to codec 3495 return setHDRStaticInfo(params); 3496} 3497 3498// subsequent initial video encoder setup for surface mode 3499status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3500 android_dataspace *dataSpace /* nonnull */) { 3501 DescribeColorAspectsParams params; 3502 InitOMXParams(¶ms); 3503 params.nPortIndex = kPortIndexInput; 3504 ColorAspects &aspects = params.sAspects; 3505 3506 // reset default format and store resulting format into both input and output formats 3507 getColorAspectsFromFormat(mConfigFormat, aspects); 3508 int32_t width, height; 3509 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3510 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3511 } 3512 setColorAspectsIntoFormat(aspects, mInputFormat); 3513 setColorAspectsIntoFormat(aspects, mOutputFormat); 3514 3515 // communicate color aspects to codec, but do not allow any change 3516 ColorAspects origAspects = aspects; 3517 status_t err = OK; 3518 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3519 status_t err = setCodecColorAspects(params, true /* readBack */); 3520 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3521 break; 3522 } 3523 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3524 mComponentName.c_str()); 3525 } 3526 3527 *dataSpace = HAL_DATASPACE_BT709; 3528 aspects = origAspects; // restore desired color aspects 3529 status_t res = getDataSpace( 3530 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3531 if (err == 
OK) { 3532 err = res; 3533 } 3534 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace); 3535 mInputFormat->setBuffer( 3536 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects))); 3537 3538 // update input format with codec supported color aspects (basically set unsupported 3539 // aspects to Unspecified) 3540 if (err == OK) { 3541 (void)getInputColorAspectsForVideoEncoder(mInputFormat); 3542 } 3543 3544 ALOGV("set default color aspects, updated input format to %s, output format to %s", 3545 mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str()); 3546 3547 return err; 3548} 3549 3550status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) { 3551 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3552 DescribeHDRStaticInfoParams params; 3553 InitOMXParams(¶ms); 3554 params.nPortIndex = portIndex; 3555 3556 status_t err = getHDRStaticInfo(params); 3557 if (err == OK) { 3558 // we only set decodec output HDRStaticInfo if codec supports them 3559 setHDRStaticInfoIntoFormat(params.sInfo, format); 3560 } 3561 return err; 3562} 3563 3564status_t ACodec::initDescribeHDRStaticInfoIndex() { 3565 status_t err = mOMX->getExtensionIndex( 3566 mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex); 3567 if (err != OK) { 3568 mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0; 3569 } 3570 return err; 3571} 3572 3573status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams ¶ms) { 3574 status_t err = ERROR_UNSUPPORTED; 3575 if (mDescribeHDRStaticInfoIndex) { 3576 err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3577 } 3578 3579 const HDRStaticInfo *info = ¶ms.sInfo; 3580 ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, " 3581 "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)", 3582 mComponentName.c_str(), 3583 info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, 
info->sType1.mG.y, 3584 info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y, 3585 info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance, 3586 info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel); 3587 3588 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3589 "[%s] setting HDRStaticInfo failed even though codec advertises support", 3590 mComponentName.c_str()); 3591 return err; 3592} 3593 3594status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams ¶ms) { 3595 status_t err = ERROR_UNSUPPORTED; 3596 if (mDescribeHDRStaticInfoIndex) { 3597 err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3598 } 3599 3600 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3601 "[%s] getting HDRStaticInfo failed even though codec advertises support", 3602 mComponentName.c_str()); 3603 return err; 3604} 3605 3606status_t ACodec::setupVideoEncoder( 3607 const char *mime, const sp<AMessage> &msg, 3608 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3609 int32_t tmp; 3610 if (!msg->findInt32("color-format", &tmp)) { 3611 return INVALID_OPERATION; 3612 } 3613 3614 OMX_COLOR_FORMATTYPE colorFormat = 3615 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3616 3617 status_t err = setVideoPortFormatType( 3618 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3619 3620 if (err != OK) { 3621 ALOGE("[%s] does not support color format %d", 3622 mComponentName.c_str(), colorFormat); 3623 3624 return err; 3625 } 3626 3627 /* Input port configuration */ 3628 3629 OMX_PARAM_PORTDEFINITIONTYPE def; 3630 InitOMXParams(&def); 3631 3632 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3633 3634 def.nPortIndex = kPortIndexInput; 3635 3636 err = mOMX->getParameter( 3637 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3638 3639 if (err != OK) { 3640 return err; 3641 } 3642 3643 int32_t width, height, bitrate; 3644 if (!msg->findInt32("width", &width) 3645 
|| !msg->findInt32("height", &height) 3646 || !msg->findInt32("bitrate", &bitrate)) { 3647 return INVALID_OPERATION; 3648 } 3649 3650 video_def->nFrameWidth = width; 3651 video_def->nFrameHeight = height; 3652 3653 int32_t stride; 3654 if (!msg->findInt32("stride", &stride)) { 3655 stride = width; 3656 } 3657 3658 video_def->nStride = stride; 3659 3660 int32_t sliceHeight; 3661 if (!msg->findInt32("slice-height", &sliceHeight)) { 3662 sliceHeight = height; 3663 } 3664 3665 video_def->nSliceHeight = sliceHeight; 3666 3667 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3668 3669 float frameRate; 3670 if (!msg->findFloat("frame-rate", &frameRate)) { 3671 int32_t tmp; 3672 if (!msg->findInt32("frame-rate", &tmp)) { 3673 return INVALID_OPERATION; 3674 } 3675 frameRate = (float)tmp; 3676 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3677 } 3678 3679 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3680 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3681 // this is redundant as it was already set up in setVideoPortFormatType 3682 // FIXME for now skip this only for flexible YUV formats 3683 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3684 video_def->eColorFormat = colorFormat; 3685 } 3686 3687 err = mOMX->setParameter( 3688 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3689 3690 if (err != OK) { 3691 ALOGE("[%s] failed to set input port definition parameters.", 3692 mComponentName.c_str()); 3693 3694 return err; 3695 } 3696 3697 /* Output port configuration */ 3698 3699 OMX_VIDEO_CODINGTYPE compressionFormat; 3700 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3701 3702 if (err != OK) { 3703 return err; 3704 } 3705 3706 err = setVideoPortFormatType( 3707 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3708 3709 if (err != OK) { 3710 ALOGE("[%s] does not support compression format %d", 3711 mComponentName.c_str(), compressionFormat); 3712 3713 return err; 3714 } 3715 3716 
def.nPortIndex = kPortIndexOutput; 3717 3718 err = mOMX->getParameter( 3719 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3720 3721 if (err != OK) { 3722 return err; 3723 } 3724 3725 video_def->nFrameWidth = width; 3726 video_def->nFrameHeight = height; 3727 video_def->xFramerate = 0; 3728 video_def->nBitrate = bitrate; 3729 video_def->eCompressionFormat = compressionFormat; 3730 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3731 3732 err = mOMX->setParameter( 3733 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3734 3735 if (err != OK) { 3736 ALOGE("[%s] failed to set output port definition parameters.", 3737 mComponentName.c_str()); 3738 3739 return err; 3740 } 3741 3742 int32_t intraRefreshPeriod = 0; 3743 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3744 && intraRefreshPeriod >= 0) { 3745 err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3746 if (err != OK) { 3747 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 3748 mComponentName.c_str()); 3749 err = OK; 3750 } 3751 } 3752 3753 switch (compressionFormat) { 3754 case OMX_VIDEO_CodingMPEG4: 3755 err = setupMPEG4EncoderParameters(msg); 3756 break; 3757 3758 case OMX_VIDEO_CodingH263: 3759 err = setupH263EncoderParameters(msg); 3760 break; 3761 3762 case OMX_VIDEO_CodingAVC: 3763 err = setupAVCEncoderParameters(msg); 3764 break; 3765 3766 case OMX_VIDEO_CodingHEVC: 3767 err = setupHEVCEncoderParameters(msg); 3768 break; 3769 3770 case OMX_VIDEO_CodingVP8: 3771 case OMX_VIDEO_CodingVP9: 3772 err = setupVPXEncoderParameters(msg); 3773 break; 3774 3775 default: 3776 break; 3777 } 3778 3779 // Set up color aspects on input, but propagate them to the output format, as they will 3780 // not be read back from encoder. 3781 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3782 if (err == ERROR_UNSUPPORTED) { 3783 ALOGI("[%s] cannot encode color aspects. 
Ignoring.", mComponentName.c_str()); 3784 err = OK; 3785 } 3786 3787 if (err != OK) { 3788 return err; 3789 } 3790 3791 err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat); 3792 if (err == ERROR_UNSUPPORTED) { // support is optional 3793 ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str()); 3794 err = OK; 3795 } 3796 3797 if (err == OK) { 3798 ALOGI("setupVideoEncoder succeeded"); 3799 } 3800 3801 return err; 3802} 3803 3804status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3805 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3806 InitOMXParams(¶ms); 3807 params.nPortIndex = kPortIndexOutput; 3808 3809 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3810 3811 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3812 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3813 int32_t mbs; 3814 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3815 return INVALID_OPERATION; 3816 } 3817 params.nCirMBs = mbs; 3818 } 3819 3820 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3821 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3822 int32_t mbs; 3823 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3824 return INVALID_OPERATION; 3825 } 3826 params.nAirMBs = mbs; 3827 3828 int32_t ref; 3829 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3830 return INVALID_OPERATION; 3831 } 3832 params.nAirRef = ref; 3833 } 3834 3835 status_t err = mOMX->setParameter( 3836 mNode, OMX_IndexParamVideoIntraRefresh, 3837 ¶ms, sizeof(params)); 3838 return err; 3839} 3840 3841static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) { 3842 if (iFramesInterval < 0) { 3843 return 0xFFFFFFFF; 3844 } else if (iFramesInterval == 0) { 3845 return 0; 3846 } 3847 OMX_U32 ret = frameRate * iFramesInterval; 3848 return ret; 3849} 3850 3851static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 3852 int32_t tmp; 3853 if 
(!msg->findInt32("bitrate-mode", &tmp)) {
        // No explicit "bitrate-mode" in the format message: default to VBR.
        return OMX_Video_ControlRateVariable;
    }

    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
}

// Configures MPEG4 encoder parameters on the output port from the format
// message. Requires "bitrate" and "i-frame-interval"; "frame-rate" may be
// float or int32; "profile"/"level" are optional but must be supported by
// the component if present.
// Returns INVALID_OPERATION for missing keys, or the first OMX error.
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as a float or as an int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    // Read-modify-write the component's MPEG4 parameters.
    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    if (mpeg4type.nPFrames == 0) {
        // All-I-frame stream when no P frames fit between sync frames.
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nBFrames = 0;
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    // Optional profile/level: "level" is mandatory once "profile" is given,
    // and the pair must be advertised by the component.
    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// Configures H.263 encoder parameters on the output port. Same message
// contract as setupMPEG4EncoderParameters.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate, iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findInt32("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as a float or as an int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate);
    if (h263type.nPFrames == 0) {
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    h263type.nBFrames = 0;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// static
// Picks the minimum AVC level that can carry the given width/height (pixels),
// frame rate and bitrate (bps) for the given profile. Returns 0 if no level
// fits. The table mirrors the H.264 Annex A level limits in units of
// macroblocks, macroblocks/sec and profile-normalized kbps.
int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
        int width, int height, int rate, int bitrate,
        OMX_VIDEO_AVCPROFILETYPE profile) {
    // convert bitrate to main/baseline profile kbps equivalent
    // (High/High10 have larger MaxBR multipliers, so divide by more).
    switch (profile) {
        case OMX_VIDEO_AVCProfileHigh10:
            bitrate = divUp(bitrate, 3000); break;
        case OMX_VIDEO_AVCProfileHigh:
            bitrate = divUp(bitrate, 1250); break;
        default:
            bitrate = divUp(bitrate, 1000); break;
    }

    // convert size and rate to MBs (16x16 macroblocks, rounded up)
    width = divUp(width, 16);
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;
    int maxDimension = max(width, height);

    static const int limits[][5] = {
        /*    MBps      MB  dim  bitrate        level */
        {     1485,     99,  28,     64, OMX_VIDEO_AVCLevel1  },
        {     1485,     99,  28,    128, OMX_VIDEO_AVCLevel1b },
        {     3000,    396,  56,    192, OMX_VIDEO_AVCLevel11 },
        {     6000,    396,  56,    384, OMX_VIDEO_AVCLevel12 },
        {    11880,    396,  56,    768, OMX_VIDEO_AVCLevel13 },
        {    11880,    396,  56,   2000, OMX_VIDEO_AVCLevel2  },
        {    19800,    792,  79,   4000, OMX_VIDEO_AVCLevel21 },
        {    20250,   1620, 113,   4000, OMX_VIDEO_AVCLevel22 },
        {    40500,   1620, 113,  10000, OMX_VIDEO_AVCLevel3  },
        {   108000,   3600, 169,  14000, OMX_VIDEO_AVCLevel31 },
        {   216000,   5120, 202,  20000, OMX_VIDEO_AVCLevel32 },
        {   245760,   8192, 256,  20000, OMX_VIDEO_AVCLevel4  },
        {   245760,   8192, 256,  50000,
OMX_VIDEO_AVCLevel41 }, 4053 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 4054 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 4055 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 4056 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 4057 }; 4058 4059 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 4060 const int (&limit)[5] = limits[i]; 4061 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 4062 && bitrate <= limit[3]) { 4063 return limit[4]; 4064 } 4065 } 4066 return 0; 4067} 4068 4069status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 4070 int32_t bitrate, iFrameInterval; 4071 if (!msg->findInt32("bitrate", &bitrate) 4072 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4073 return INVALID_OPERATION; 4074 } 4075 4076 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4077 4078 float frameRate; 4079 if (!msg->findFloat("frame-rate", &frameRate)) { 4080 int32_t tmp; 4081 if (!msg->findInt32("frame-rate", &tmp)) { 4082 return INVALID_OPERATION; 4083 } 4084 frameRate = (float)tmp; 4085 } 4086 4087 status_t err = OK; 4088 int32_t intraRefreshMode = 0; 4089 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 4090 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 4091 if (err != OK) { 4092 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4093 err, intraRefreshMode); 4094 return err; 4095 } 4096 } 4097 4098 OMX_VIDEO_PARAM_AVCTYPE h264type; 4099 InitOMXParams(&h264type); 4100 h264type.nPortIndex = kPortIndexOutput; 4101 4102 err = mOMX->getParameter( 4103 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4104 4105 if (err != OK) { 4106 return err; 4107 } 4108 4109 h264type.nAllowedPictureTypes = 4110 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4111 4112 int32_t profile; 4113 if (msg->findInt32("profile", &profile)) { 4114 int32_t level; 4115 if (!msg->findInt32("level", &level)) { 4116 return INVALID_OPERATION; 4117 } 4118 4119 err = 
verifySupportForProfileAndLevel(profile, level); 4120 4121 if (err != OK) { 4122 return err; 4123 } 4124 4125 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4126 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4127 } else { 4128 // Use baseline profile for AVC recording if profile is not specified. 4129 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4130 } 4131 4132 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4133 asString(h264type.eProfile), asString(h264type.eLevel)); 4134 4135 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4136 h264type.nSliceHeaderSpacing = 0; 4137 h264type.bUseHadamard = OMX_TRUE; 4138 h264type.nRefFrames = 1; 4139 h264type.nBFrames = 0; 4140 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4141 if (h264type.nPFrames == 0) { 4142 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4143 } 4144 h264type.nRefIdx10ActiveMinus1 = 0; 4145 h264type.nRefIdx11ActiveMinus1 = 0; 4146 h264type.bEntropyCodingCABAC = OMX_FALSE; 4147 h264type.bWeightedPPrediction = OMX_FALSE; 4148 h264type.bconstIpred = OMX_FALSE; 4149 h264type.bDirect8x8Inference = OMX_FALSE; 4150 h264type.bDirectSpatialTemporal = OMX_FALSE; 4151 h264type.nCabacInitIdc = 0; 4152 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4153 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4154 h264type.nSliceHeaderSpacing = 0; 4155 h264type.bUseHadamard = OMX_TRUE; 4156 h264type.nRefFrames = 2; 4157 h264type.nBFrames = 1; 4158 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4159 h264type.nAllowedPictureTypes = 4160 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4161 h264type.nRefIdx10ActiveMinus1 = 0; 4162 h264type.nRefIdx11ActiveMinus1 = 0; 4163 h264type.bEntropyCodingCABAC = OMX_TRUE; 4164 h264type.bWeightedPPrediction = OMX_TRUE; 4165 h264type.bconstIpred = OMX_TRUE; 4166 h264type.bDirect8x8Inference = OMX_TRUE; 4167 h264type.bDirectSpatialTemporal = 
OMX_TRUE; 4168 h264type.nCabacInitIdc = 1; 4169 } 4170 4171 if (h264type.nBFrames != 0) { 4172 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4173 } 4174 4175 h264type.bEnableUEP = OMX_FALSE; 4176 h264type.bEnableFMO = OMX_FALSE; 4177 h264type.bEnableASO = OMX_FALSE; 4178 h264type.bEnableRS = OMX_FALSE; 4179 h264type.bFrameMBsOnly = OMX_TRUE; 4180 h264type.bMBAFF = OMX_FALSE; 4181 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4182 4183 err = mOMX->setParameter( 4184 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4185 4186 if (err != OK) { 4187 return err; 4188 } 4189 4190 return configureBitrate(bitrate, bitrateMode); 4191} 4192 4193status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4194 int32_t bitrate, iFrameInterval; 4195 if (!msg->findInt32("bitrate", &bitrate) 4196 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4197 return INVALID_OPERATION; 4198 } 4199 4200 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4201 4202 float frameRate; 4203 if (!msg->findFloat("frame-rate", &frameRate)) { 4204 int32_t tmp; 4205 if (!msg->findInt32("frame-rate", &tmp)) { 4206 return INVALID_OPERATION; 4207 } 4208 frameRate = (float)tmp; 4209 } 4210 4211 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4212 InitOMXParams(&hevcType); 4213 hevcType.nPortIndex = kPortIndexOutput; 4214 4215 status_t err = OK; 4216 err = mOMX->getParameter( 4217 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4218 if (err != OK) { 4219 return err; 4220 } 4221 4222 int32_t profile; 4223 if (msg->findInt32("profile", &profile)) { 4224 int32_t level; 4225 if (!msg->findInt32("level", &level)) { 4226 return INVALID_OPERATION; 4227 } 4228 4229 err = verifySupportForProfileAndLevel(profile, level); 4230 if (err != OK) { 4231 return err; 4232 } 4233 4234 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4235 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4236 } 4237 // TODO: finer 
control? 4238 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4239 4240 err = mOMX->setParameter( 4241 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4242 if (err != OK) { 4243 return err; 4244 } 4245 4246 return configureBitrate(bitrate, bitrateMode); 4247} 4248 4249status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 4250 int32_t bitrate; 4251 int32_t iFrameInterval = 0; 4252 size_t tsLayers = 0; 4253 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4254 OMX_VIDEO_VPXTemporalLayerPatternNone; 4255 static const uint32_t kVp8LayerRateAlloction 4256 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4257 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4258 {100, 100, 100}, // 1 layer 4259 { 60, 100, 100}, // 2 layers {60%, 40%} 4260 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4261 }; 4262 if (!msg->findInt32("bitrate", &bitrate)) { 4263 return INVALID_OPERATION; 4264 } 4265 msg->findInt32("i-frame-interval", &iFrameInterval); 4266 4267 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4268 4269 float frameRate; 4270 if (!msg->findFloat("frame-rate", &frameRate)) { 4271 int32_t tmp; 4272 if (!msg->findInt32("frame-rate", &tmp)) { 4273 return INVALID_OPERATION; 4274 } 4275 frameRate = (float)tmp; 4276 } 4277 4278 AString tsSchema; 4279 if (msg->findString("ts-schema", &tsSchema)) { 4280 if (tsSchema == "webrtc.vp8.1-layer") { 4281 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4282 tsLayers = 1; 4283 } else if (tsSchema == "webrtc.vp8.2-layer") { 4284 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4285 tsLayers = 2; 4286 } else if (tsSchema == "webrtc.vp8.3-layer") { 4287 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4288 tsLayers = 3; 4289 } else { 4290 ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str()); 4291 } 4292 } 4293 4294 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4295 InitOMXParams(&vp8type); 4296 vp8type.nPortIndex = kPortIndexOutput; 4297 status_t err = 
mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
            &vp8type, sizeof(vp8type));

    // The Android VP8 extension is optional: apply it when supported, but
    // only warn (do not fail) if it cannot be set.
    if (err == OK) {
        if (iFrameInterval > 0) {
            vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate);
        }
        vp8type.eTemporalPattern = pattern;
        vp8type.nTemporalLayerCount = tsLayers;
        if (tsLayers > 0) {
            for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) {
                vp8type.nTemporalLayerBitrateRatio[i] =
                    kVp8LayerRateAlloction[tsLayers - 1][i];
            }
        }
        if (bitrateMode == OMX_Video_ControlRateConstant) {
            // Widen the quantizer range so CBR can actually hold the rate.
            vp8type.nMinQuantizer = 2;
            vp8type.nMaxQuantizer = 63;
        }

        err = mOMX->setParameter(
                mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                &vp8type, sizeof(vp8type));
        if (err != OK) {
            ALOGW("Extended VP8 parameters set failed: %d", err);
        }
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Enumerates the component's advertised profile/level pairs on the output
// port and returns OK iff `profile` matches exactly and `level` does not
// exceed the advertised level. Returns the OMX error on query failure, or
// ERROR_UNSUPPORTED when no match is found within kMaxIndicesToCheck + 1
// entries (a warning is logged when the cap is reached).
status_t ACodec::verifySupportForProfileAndLevel(
        int32_t profile, int32_t level) {
    OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        params.nProfileIndex = index;
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
                sizeof(params));

        if (err != OK) {
            return err;
        }

        int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
        int32_t supportedLevel = static_cast<int32_t>(params.eLevel);

        if (profile == supportedProfile && level <= supportedLevel) {
            return OK;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                    mComponentName.c_str(), index,
                    params.eProfile, params.eLevel);
        }
    }
    return ERROR_UNSUPPORTED;
}

// Applies the target bitrate and rate-control mode to the output port via a
// read-modify-write of the component's bitrate parameters.
status_t ACodec::configureBitrate(
        int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
    InitOMXParams(&bitrateType);
    bitrateType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));

    if (err != OK) {
        return err;
    }

    bitrateType.eControlRate = bitrateMode;
    bitrateType.nTargetBitrate = bitrate;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoBitrate,
            &bitrateType, sizeof(bitrateType));
}

// Enables resync markers (spacing 256) on the output port; HEC, data
// partitioning and RVLC stay disabled. Error correction is treated as an
// optional component feature: a failed query is silently ignored.
status_t ACodec::setupErrorCorrectionParameters() {
    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
    InitOMXParams(&errorCorrectionType);
    errorCorrectionType.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));

    if (err != OK) {
        return OK;  // Optional feature. Ignore this failure
    }

    errorCorrectionType.bEnableHEC = OMX_FALSE;
    errorCorrectionType.bEnableResync = OMX_TRUE;
    errorCorrectionType.nResynchMarkerSpacing = 256;
    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
    errorCorrectionType.bEnableRVLC = OMX_FALSE;

    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));
}

// Applies frame size (and, for the input port, compression format and frame
// rate in Q16) to the given port's definition. Negative frameRate leaves the
// component's advertised xFramerate untouched.
status_t ACodec::setVideoFormatOnPort(
        OMX_U32 portIndex,
        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
        float frameRate) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (portIndex == kPortIndexInput) {
        // XXX Need a (much) better heuristic to compute input buffer sizes.
const size_t X = 64 * 1024;
        if (def.nBufferSize < X) {
            // Enforce a 64KiB floor on input buffer size.
            def.nBufferSize = X;
        }
    }

    if (def.eDomain != OMX_PortDomainVideo) {
        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
        return FAILED_TRANSACTION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    if (portIndex == kPortIndexInput) {
        video_def->eCompressionFormat = compressionFormat;
        video_def->eColorFormat = OMX_COLOR_FormatUnused;
        if (frameRate >= 0) {
            // OMX frame rate is Q16 fixed point.
            video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
        }
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    return err;
}

// Enables or disables native (graphic) buffer usage on the output port
// depending on whether a native window is attached. The disable path's
// return value is deliberately ignored.
status_t ACodec::initNativeWindow() {
    if (mNativeWindow != NULL) {
        return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
    }

    mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
    return OK;
}

// Counts buffers on the given port currently held by the OMX component.
size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            ++n;
        }
    }

    return n;
}

// Counts output buffers currently queued to / held by the native window.
size_t ACodec::countBuffersOwnedByNativeWindow() const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ++n;
        }
    }

    return n;
}

// Dequeues buffers back from the native window until only the minimum
// undequeued count remains (or dequeueing fails). No-op without a window.
void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
    if (mNativeWindow == NULL) {
        return;
    }

    while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
            && dequeueBufferFromNativeWindow() != NULL) {
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}

// Returns true iff every buffer on the port is owned by us or by the native
// window (i.e. none are held by the component or upstream/downstream).
bool ACodec::allYourBuffersAreBelongToUs(
        OMX_U32 portIndex) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mStatus != BufferInfo::OWNED_BY_US
                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("[%s] Buffer %u on port %u still has status %d",
                    mComponentName.c_str(),
                    info->mBufferID, portIndex, info->mStatus);
            return false;
        }
    }

    return true;
}

// Convenience overload covering both ports.
bool ACodec::allYourBuffersAreBelongToUs() {
    return allYourBuffersAreBelongToUs(kPortIndexInput)
        && allYourBuffersAreBelongToUs(kPortIndexOutput);
}

// Queues a message for redelivery once the current state can handle it.
void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}

// Redelivers all deferred messages in FIFO order. The queue is swapped out
// first so messages deferred again during processing are not re-processed
// in this pass.
void ACodec::processDeferredMessages() {
    List<sp<AMessage> > queue = mDeferredQueue;
    mDeferredQueue.clear();

    List<sp<AMessage> >::iterator it = queue.begin();
    while (it != queue.end()) {
        onMessageReceived(*it++);
    }
}

// static
// Fills params.sMediaImage with a default plane layout for the common YUV420
// formats (planar, packed-planar, semi-planar, packed-semi-planar, YV12).
// Returns false (with mType left UNKNOWN) for any other color format or for
// unusable stride/sliceHeight values.
bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format: Y plane is always full-resolution at the start.
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // YV12 gralloc layout: 16-aligned strides, V plane before U.
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            // I420 layout: U plane follows Y, V plane follows U.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12: interleaved UV plane, so chroma column increment is 2.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            // Unreachable: fmt was validated above.
            TRESPASS();
    }
    return true;
}

// static
// Asks the component to describe the color format: first via the v1
// "describeColorFormat" extension (upconverted to v2), then via
// "describeColorFormat2", finally falling back to the built-in default
// description. Returns true iff a usable MediaImage2 was produced.
bool ACodec::describeColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        DescribeColorFormat2Params &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat",
            &describeColorFormatIndex) == OK) {
        DescribeColorFormatParams describeParamsV1(describeParams);
        if (omx->getParameter(
                node, describeColorFormatIndex,
                &describeParamsV1, sizeof(describeParamsV1)) == OK) {
            describeParams.initFromV1(describeParamsV1);
            return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        }
    } else if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
            && omx->getParameter(
                    node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
        return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    }

    return describeDefaultColorFormat(describeParams);
}

// static
// Determines whether `colorFormat` on this component is a flexible-YUV420
// equivalent by describing a small dummy frame and checking the plane
// subsampling. On success stores the flexible format in *flexibleEquivalent.
bool ACodec::isFlexibleColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
    DescribeColorFormat2Params describeParams;
    InitOMXParams(&describeParams);
    describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
    // reasonable dummy values
    describeParams.nFrameWidth = 128;
    describeParams.nFrameHeight = 128;
    describeParams.nStride = 128;
    describeParams.nSliceHeight = 128;
    describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;

    CHECK(flexibleEquivalent != NULL);

    if (!describeColorFormat(omx, node, describeParams)) {
        return false;
    }

    const MediaImage2 &img = describeParams.sMediaImage;
    if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
        if (img.mNumPlanes != 3
                || img.mPlane[img.Y].mHorizSubsampling != 1
                || img.mPlane[img.Y].mVertSubsampling != 1) {
            return false;
        }

        // YUV 420
        if (img.mPlane[img.U].mHorizSubsampling == 2
                && img.mPlane[img.U].mVertSubsampling == 2
                && img.mPlane[img.V].mHorizSubsampling == 2
                &&
img.mPlane[img.V].mVertSubsampling == 2) { 4714 // possible flexible YUV420 format 4715 if (img.mBitDepth <= 8) { 4716 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible; 4717 return true; 4718 } 4719 } 4720 } 4721 return false; 4722} 4723 4724status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4725 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4726 OMX_PARAM_PORTDEFINITIONTYPE def; 4727 InitOMXParams(&def); 4728 def.nPortIndex = portIndex; 4729 4730 status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4731 if (err != OK) { 4732 return err; 4733 } 4734 4735 if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) { 4736 ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex); 4737 return BAD_VALUE; 4738 } 4739 4740 switch (def.eDomain) { 4741 case OMX_PortDomainVideo: 4742 { 4743 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 4744 switch ((int)videoDef->eCompressionFormat) { 4745 case OMX_VIDEO_CodingUnused: 4746 { 4747 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 4748 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 4749 4750 notify->setInt32("stride", videoDef->nStride); 4751 notify->setInt32("slice-height", videoDef->nSliceHeight); 4752 notify->setInt32("color-format", videoDef->eColorFormat); 4753 4754 if (mNativeWindow == NULL) { 4755 DescribeColorFormat2Params describeParams; 4756 InitOMXParams(&describeParams); 4757 describeParams.eColorFormat = videoDef->eColorFormat; 4758 describeParams.nFrameWidth = videoDef->nFrameWidth; 4759 describeParams.nFrameHeight = videoDef->nFrameHeight; 4760 describeParams.nStride = videoDef->nStride; 4761 describeParams.nSliceHeight = videoDef->nSliceHeight; 4762 describeParams.bUsingNativeBuffers = OMX_FALSE; 4763 4764 if (describeColorFormat(mOMX, mNode, describeParams)) { 4765 notify->setBuffer( 4766 "image-data", 4767 ABuffer::CreateAsCopy( 4768 
&describeParams.sMediaImage, 4769 sizeof(describeParams.sMediaImage))); 4770 4771 MediaImage2 &img = describeParams.sMediaImage; 4772 MediaImage2::PlaneInfo *plane = img.mPlane; 4773 ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }", 4774 mComponentName.c_str(), img.mWidth, img.mHeight, 4775 plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc, 4776 plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc, 4777 plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc); 4778 } 4779 } 4780 4781 int32_t width = (int32_t)videoDef->nFrameWidth; 4782 int32_t height = (int32_t)videoDef->nFrameHeight; 4783 4784 if (portIndex == kPortIndexOutput) { 4785 OMX_CONFIG_RECTTYPE rect; 4786 InitOMXParams(&rect); 4787 rect.nPortIndex = portIndex; 4788 4789 if (mOMX->getConfig( 4790 mNode, 4791 (portIndex == kPortIndexOutput ? 4792 OMX_IndexConfigCommonOutputCrop : 4793 OMX_IndexConfigCommonInputCrop), 4794 &rect, sizeof(rect)) != OK) { 4795 rect.nLeft = 0; 4796 rect.nTop = 0; 4797 rect.nWidth = videoDef->nFrameWidth; 4798 rect.nHeight = videoDef->nFrameHeight; 4799 } 4800 4801 if (rect.nLeft < 0 || 4802 rect.nTop < 0 || 4803 rect.nLeft + rect.nWidth > videoDef->nFrameWidth || 4804 rect.nTop + rect.nHeight > videoDef->nFrameHeight) { 4805 ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)", 4806 rect.nLeft, rect.nTop, 4807 rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight, 4808 videoDef->nFrameWidth, videoDef->nFrameHeight); 4809 return BAD_VALUE; 4810 } 4811 4812 notify->setRect( 4813 "crop", 4814 rect.nLeft, 4815 rect.nTop, 4816 rect.nLeft + rect.nWidth - 1, 4817 rect.nTop + rect.nHeight - 1); 4818 4819 width = rect.nWidth; 4820 height = rect.nHeight; 4821 4822 android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN; 4823 (void)getColorAspectsAndDataSpaceForVideoDecoder( 4824 width, height, mConfigFormat, notify, 4825 mUsingNativeWindow ? 
&dataSpace : NULL); 4826 if (mUsingNativeWindow) { 4827 notify->setInt32("android._dataspace", dataSpace); 4828 } 4829 (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify); 4830 } else { 4831 (void)getInputColorAspectsForVideoEncoder(notify); 4832 if (mConfigFormat->contains("hdr-static-info")) { 4833 (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify); 4834 } 4835 } 4836 4837 break; 4838 } 4839 4840 case OMX_VIDEO_CodingVP8: 4841 case OMX_VIDEO_CodingVP9: 4842 { 4843 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4844 InitOMXParams(&vp8type); 4845 vp8type.nPortIndex = kPortIndexOutput; 4846 status_t err = mOMX->getParameter( 4847 mNode, 4848 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4849 &vp8type, 4850 sizeof(vp8type)); 4851 4852 if (err == OK) { 4853 AString tsSchema = "none"; 4854 if (vp8type.eTemporalPattern 4855 == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { 4856 switch (vp8type.nTemporalLayerCount) { 4857 case 1: 4858 { 4859 tsSchema = "webrtc.vp8.1-layer"; 4860 break; 4861 } 4862 case 2: 4863 { 4864 tsSchema = "webrtc.vp8.2-layer"; 4865 break; 4866 } 4867 case 3: 4868 { 4869 tsSchema = "webrtc.vp8.3-layer"; 4870 break; 4871 } 4872 default: 4873 { 4874 break; 4875 } 4876 } 4877 } 4878 notify->setString("ts-schema", tsSchema); 4879 } 4880 // Fall through to set up mime. 
4881 } 4882 4883 default: 4884 { 4885 if (mIsEncoder ^ (portIndex == kPortIndexOutput)) { 4886 // should be CodingUnused 4887 ALOGE("Raw port video compression format is %s(%d)", 4888 asString(videoDef->eCompressionFormat), 4889 videoDef->eCompressionFormat); 4890 return BAD_VALUE; 4891 } 4892 AString mime; 4893 if (GetMimeTypeForVideoCoding( 4894 videoDef->eCompressionFormat, &mime) != OK) { 4895 notify->setString("mime", "application/octet-stream"); 4896 } else { 4897 notify->setString("mime", mime.c_str()); 4898 } 4899 uint32_t intraRefreshPeriod = 0; 4900 if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK 4901 && intraRefreshPeriod > 0) { 4902 notify->setInt32("intra-refresh-period", intraRefreshPeriod); 4903 } 4904 break; 4905 } 4906 } 4907 notify->setInt32("width", videoDef->nFrameWidth); 4908 notify->setInt32("height", videoDef->nFrameHeight); 4909 ALOGV("[%s] %s format is %s", mComponentName.c_str(), 4910 portIndex == kPortIndexInput ? "input" : "output", 4911 notify->debugString().c_str()); 4912 4913 break; 4914 } 4915 4916 case OMX_PortDomainAudio: 4917 { 4918 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 4919 4920 switch ((int)audioDef->eEncoding) { 4921 case OMX_AUDIO_CodingPCM: 4922 { 4923 OMX_AUDIO_PARAM_PCMMODETYPE params; 4924 InitOMXParams(¶ms); 4925 params.nPortIndex = portIndex; 4926 4927 err = mOMX->getParameter( 4928 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4929 if (err != OK) { 4930 return err; 4931 } 4932 4933 if (params.nChannels <= 0 4934 || (params.nChannels != 1 && !params.bInterleaved) 4935 || params.ePCMMode != OMX_AUDIO_PCMModeLinear) { 4936 ALOGE("unsupported PCM port: %u channels%s, %u-bit", 4937 params.nChannels, 4938 params.bInterleaved ? 
" interleaved" : "", 4939 params.nBitPerSample); 4940 return FAILED_TRANSACTION; 4941 } 4942 4943 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 4944 notify->setInt32("channel-count", params.nChannels); 4945 notify->setInt32("sample-rate", params.nSamplingRate); 4946 4947 AudioEncoding encoding = kAudioEncodingPcm16bit; 4948 if (params.eNumData == OMX_NumericalDataUnsigned 4949 && params.nBitPerSample == 8u) { 4950 encoding = kAudioEncodingPcm8bit; 4951 } else if (params.eNumData == OMX_NumericalDataFloat 4952 && params.nBitPerSample == 32u) { 4953 encoding = kAudioEncodingPcmFloat; 4954 } else if (params.nBitPerSample != 16u 4955 || params.eNumData != OMX_NumericalDataSigned) { 4956 ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ", 4957 asString(params.eNumData), params.eNumData, 4958 asString(params.ePCMMode), params.ePCMMode); 4959 return FAILED_TRANSACTION; 4960 } 4961 notify->setInt32("pcm-encoding", encoding); 4962 4963 if (mChannelMaskPresent) { 4964 notify->setInt32("channel-mask", mChannelMask); 4965 } 4966 break; 4967 } 4968 4969 case OMX_AUDIO_CodingAAC: 4970 { 4971 OMX_AUDIO_PARAM_AACPROFILETYPE params; 4972 InitOMXParams(¶ms); 4973 params.nPortIndex = portIndex; 4974 4975 err = mOMX->getParameter( 4976 mNode, OMX_IndexParamAudioAac, ¶ms, sizeof(params)); 4977 if (err != OK) { 4978 return err; 4979 } 4980 4981 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 4982 notify->setInt32("channel-count", params.nChannels); 4983 notify->setInt32("sample-rate", params.nSampleRate); 4984 break; 4985 } 4986 4987 case OMX_AUDIO_CodingAMR: 4988 { 4989 OMX_AUDIO_PARAM_AMRTYPE params; 4990 InitOMXParams(¶ms); 4991 params.nPortIndex = portIndex; 4992 4993 err = mOMX->getParameter( 4994 mNode, OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); 4995 if (err != OK) { 4996 return err; 4997 } 4998 4999 notify->setInt32("channel-count", 1); 5000 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 5001 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 5002 
notify->setInt32("sample-rate", 16000); 5003 } else { 5004 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 5005 notify->setInt32("sample-rate", 8000); 5006 } 5007 break; 5008 } 5009 5010 case OMX_AUDIO_CodingFLAC: 5011 { 5012 OMX_AUDIO_PARAM_FLACTYPE params; 5013 InitOMXParams(¶ms); 5014 params.nPortIndex = portIndex; 5015 5016 err = mOMX->getParameter( 5017 mNode, OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); 5018 if (err != OK) { 5019 return err; 5020 } 5021 5022 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 5023 notify->setInt32("channel-count", params.nChannels); 5024 notify->setInt32("sample-rate", params.nSampleRate); 5025 break; 5026 } 5027 5028 case OMX_AUDIO_CodingMP3: 5029 { 5030 OMX_AUDIO_PARAM_MP3TYPE params; 5031 InitOMXParams(¶ms); 5032 params.nPortIndex = portIndex; 5033 5034 err = mOMX->getParameter( 5035 mNode, OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); 5036 if (err != OK) { 5037 return err; 5038 } 5039 5040 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 5041 notify->setInt32("channel-count", params.nChannels); 5042 notify->setInt32("sample-rate", params.nSampleRate); 5043 break; 5044 } 5045 5046 case OMX_AUDIO_CodingVORBIS: 5047 { 5048 OMX_AUDIO_PARAM_VORBISTYPE params; 5049 InitOMXParams(¶ms); 5050 params.nPortIndex = portIndex; 5051 5052 err = mOMX->getParameter( 5053 mNode, OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); 5054 if (err != OK) { 5055 return err; 5056 } 5057 5058 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 5059 notify->setInt32("channel-count", params.nChannels); 5060 notify->setInt32("sample-rate", params.nSampleRate); 5061 break; 5062 } 5063 5064 case OMX_AUDIO_CodingAndroidAC3: 5065 { 5066 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 5067 InitOMXParams(¶ms); 5068 params.nPortIndex = portIndex; 5069 5070 err = mOMX->getParameter( 5071 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 5072 ¶ms, sizeof(params)); 5073 if (err != OK) { 5074 return err; 5075 } 5076 5077 notify->setString("mime", 
MEDIA_MIMETYPE_AUDIO_AC3); 5078 notify->setInt32("channel-count", params.nChannels); 5079 notify->setInt32("sample-rate", params.nSampleRate); 5080 break; 5081 } 5082 5083 case OMX_AUDIO_CodingAndroidEAC3: 5084 { 5085 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 5086 InitOMXParams(¶ms); 5087 params.nPortIndex = portIndex; 5088 5089 err = mOMX->getParameter( 5090 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 5091 ¶ms, sizeof(params)); 5092 if (err != OK) { 5093 return err; 5094 } 5095 5096 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 5097 notify->setInt32("channel-count", params.nChannels); 5098 notify->setInt32("sample-rate", params.nSampleRate); 5099 break; 5100 } 5101 5102 case OMX_AUDIO_CodingAndroidOPUS: 5103 { 5104 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 5105 InitOMXParams(¶ms); 5106 params.nPortIndex = portIndex; 5107 5108 err = mOMX->getParameter( 5109 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 5110 ¶ms, sizeof(params)); 5111 if (err != OK) { 5112 return err; 5113 } 5114 5115 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 5116 notify->setInt32("channel-count", params.nChannels); 5117 notify->setInt32("sample-rate", params.nSampleRate); 5118 break; 5119 } 5120 5121 case OMX_AUDIO_CodingG711: 5122 { 5123 OMX_AUDIO_PARAM_PCMMODETYPE params; 5124 InitOMXParams(¶ms); 5125 params.nPortIndex = portIndex; 5126 5127 err = mOMX->getParameter( 5128 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5129 if (err != OK) { 5130 return err; 5131 } 5132 5133 const char *mime = NULL; 5134 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 5135 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 5136 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 5137 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 5138 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 5139 mime = MEDIA_MIMETYPE_AUDIO_RAW; 5140 } 5141 notify->setString("mime", mime); 5142 notify->setInt32("channel-count", params.nChannels); 5143 notify->setInt32("sample-rate", 
params.nSamplingRate); 5144 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5145 break; 5146 } 5147 5148 case OMX_AUDIO_CodingGSMFR: 5149 { 5150 OMX_AUDIO_PARAM_PCMMODETYPE params; 5151 InitOMXParams(¶ms); 5152 params.nPortIndex = portIndex; 5153 5154 err = mOMX->getParameter( 5155 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5156 if (err != OK) { 5157 return err; 5158 } 5159 5160 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5161 notify->setInt32("channel-count", params.nChannels); 5162 notify->setInt32("sample-rate", params.nSamplingRate); 5163 break; 5164 } 5165 5166 default: 5167 ALOGE("Unsupported audio coding: %s(%d)\n", 5168 asString(audioDef->eEncoding), audioDef->eEncoding); 5169 return BAD_TYPE; 5170 } 5171 break; 5172 } 5173 5174 default: 5175 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5176 return BAD_TYPE; 5177 } 5178 5179 return OK; 5180} 5181 5182void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5183 // aspects are normally communicated in ColorAspects 5184 int32_t range, standard, transfer; 5185 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5186 5187 // if some aspects are unspecified, use dataspace fields 5188 if (range != 0) { 5189 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5190 } 5191 if (standard != 0) { 5192 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5193 } 5194 if (transfer != 0) { 5195 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5196 } 5197 5198 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5199 if (range != 0) { 5200 mOutputFormat->setInt32("color-range", range); 5201 } 5202 if (standard != 0) { 5203 mOutputFormat->setInt32("color-standard", standard); 5204 } 5205 if (transfer != 0) { 5206 mOutputFormat->setInt32("color-transfer", transfer); 5207 } 5208 5209 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5210 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5211 dataSpace, 5212 aspects.mRange, asString(aspects.mRange), 5213 aspects.mPrimaries, asString(aspects.mPrimaries), 5214 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5215 aspects.mTransfer, asString(aspects.mTransfer), 5216 range, asString((ColorRange)range), 5217 standard, asString((ColorStandard)standard), 5218 transfer, asString((ColorTransfer)transfer)); 5219} 5220 5221void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5222 // store new output format, at the same time mark that this is no longer the first frame 5223 mOutputFormat = mBaseOutputFormat->dup(); 5224 5225 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5226 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5227 return; 5228 } 5229 5230 if (expectedFormat != NULL) { 5231 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5232 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5233 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5234 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5235 mComponentName.c_str(), 5236 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5237 } 5238 } 5239 5240 if (!mIsVideo && !mIsEncoder) { 5241 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5242 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5243 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5244 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5245 5246 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5247 if (mConverter[kPortIndexOutput] != NULL) { 5248 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5249 } 5250 } 5251 5252 if (mTunneled) { 5253 sendFormatChange(); 5254 } 5255} 5256 5257void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> ¬ify) { 5258 AString mime; 5259 CHECK(mOutputFormat->findString("mime", &mime)); 5260 5261 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) { 5262 // notify renderer of the crop change and dataspace change 5263 // NOTE: native window uses extended right-bottom coordinate 5264 int32_t left, top, right, bottom; 5265 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) { 5266 notify->setRect("crop", left, top, right + 1, bottom + 1); 5267 } 5268 5269 int32_t dataSpace; 5270 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) { 5271 notify->setInt32("dataspace", dataSpace); 5272 } 5273 } 5274} 5275 5276void ACodec::sendFormatChange() { 5277 AString mime; 5278 CHECK(mOutputFormat->findString("mime", &mime)); 5279 5280 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 5281 int32_t channelCount; 5282 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 5283 if (mSkipCutBuffer != NULL) { 5284 size_t prevbufsize = mSkipCutBuffer->size(); 5285 if (prevbufsize != 0) { 5286 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 5287 } 5288 } 5289 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 5290 } 5291 5292 sp<AMessage> notify = mNotify->dup(); 5293 notify->setInt32("what", kWhatOutputFormatChanged); 5294 notify->setMessage("format", mOutputFormat); 5295 notify->post(); 5296 5297 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 5298 mLastOutputFormat = mOutputFormat; 5299} 5300 5301void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 5302 sp<AMessage> notify = mNotify->dup(); 5303 notify->setInt32("what", CodecBase::kWhatError); 5304 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 5305 5306 if (internalError == UNKNOWN_ERROR) { // find better error code 5307 const status_t omxStatus = 
statusFromOMXError(error); 5308 if (omxStatus != 0) { 5309 internalError = omxStatus; 5310 } else { 5311 ALOGW("Invalid OMX error %#x", error); 5312 } 5313 } 5314 5315 mFatalError = true; 5316 5317 notify->setInt32("err", internalError); 5318 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 5319 notify->post(); 5320} 5321 5322//////////////////////////////////////////////////////////////////////////////// 5323 5324ACodec::PortDescription::PortDescription() { 5325} 5326 5327status_t ACodec::requestIDRFrame() { 5328 if (!mIsEncoder) { 5329 return ERROR_UNSUPPORTED; 5330 } 5331 5332 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5333 InitOMXParams(¶ms); 5334 5335 params.nPortIndex = kPortIndexOutput; 5336 params.IntraRefreshVOP = OMX_TRUE; 5337 5338 return mOMX->setConfig( 5339 mNode, 5340 OMX_IndexConfigVideoIntraVOPRefresh, 5341 ¶ms, 5342 sizeof(params)); 5343} 5344 5345void ACodec::PortDescription::addBuffer( 5346 IOMX::buffer_id id, const sp<ABuffer> &buffer, 5347 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) { 5348 mBufferIDs.push_back(id); 5349 mBuffers.push_back(buffer); 5350 mHandles.push_back(handle); 5351 mMemRefs.push_back(memRef); 5352} 5353 5354size_t ACodec::PortDescription::countBuffers() { 5355 return mBufferIDs.size(); 5356} 5357 5358IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5359 return mBufferIDs.itemAt(index); 5360} 5361 5362sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5363 return mBuffers.itemAt(index); 5364} 5365 5366sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const { 5367 return mHandles.itemAt(index); 5368} 5369 5370sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const { 5371 return mMemRefs.itemAt(index); 5372} 5373 5374//////////////////////////////////////////////////////////////////////////////// 5375 5376ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5377 : AState(parentState), 5378 
mCodec(codec) { 5379} 5380 5381ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5382 OMX_U32 /* portIndex */) { 5383 return KEEP_BUFFERS; 5384} 5385 5386bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5387 switch (msg->what()) { 5388 case kWhatInputBufferFilled: 5389 { 5390 onInputBufferFilled(msg); 5391 break; 5392 } 5393 5394 case kWhatOutputBufferDrained: 5395 { 5396 onOutputBufferDrained(msg); 5397 break; 5398 } 5399 5400 case ACodec::kWhatOMXMessageList: 5401 { 5402 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5403 } 5404 5405 case ACodec::kWhatOMXMessageItem: 5406 { 5407 // no need to check as we already did it for kWhatOMXMessageList 5408 return onOMXMessage(msg); 5409 } 5410 5411 case ACodec::kWhatOMXMessage: 5412 { 5413 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5414 } 5415 5416 case ACodec::kWhatSetSurface: 5417 { 5418 sp<AReplyToken> replyID; 5419 CHECK(msg->senderAwaitsResponse(&replyID)); 5420 5421 sp<RefBase> obj; 5422 CHECK(msg->findObject("surface", &obj)); 5423 5424 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5425 5426 sp<AMessage> response = new AMessage; 5427 response->setInt32("err", err); 5428 response->postReply(replyID); 5429 break; 5430 } 5431 5432 case ACodec::kWhatCreateInputSurface: 5433 case ACodec::kWhatSetInputSurface: 5434 case ACodec::kWhatSignalEndOfInputStream: 5435 { 5436 // This may result in an app illegal state exception. 5437 ALOGE("Message 0x%x was not handled", msg->what()); 5438 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5439 return true; 5440 } 5441 5442 case ACodec::kWhatOMXDied: 5443 { 5444 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 
5445 ALOGE("OMX/mediaserver died, signalling error!"); 5446 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5447 break; 5448 } 5449 5450 case ACodec::kWhatReleaseCodecInstance: 5451 { 5452 ALOGI("[%s] forcing the release of codec", 5453 mCodec->mComponentName.c_str()); 5454 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5455 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5456 mCodec->mComponentName.c_str(), err); 5457 sp<AMessage> notify = mCodec->mNotify->dup(); 5458 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5459 notify->post(); 5460 break; 5461 } 5462 5463 default: 5464 return false; 5465 } 5466 5467 return true; 5468} 5469 5470bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5471 // there is a possibility that this is an outstanding message for a 5472 // codec that we have already destroyed 5473 if (mCodec->mNode == 0) { 5474 ALOGI("ignoring message as already freed component: %s", 5475 msg->debugString().c_str()); 5476 return false; 5477 } 5478 5479 IOMX::node_id nodeID; 5480 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5481 if (nodeID != mCodec->mNode) { 5482 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5483 return false; 5484 } 5485 return true; 5486} 5487 5488bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5489 sp<RefBase> obj; 5490 CHECK(msg->findObject("messages", &obj)); 5491 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5492 5493 bool receivedRenderedEvents = false; 5494 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5495 it != msgList->getList().cend(); ++it) { 5496 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 5497 mCodec->handleMessage(*it); 5498 int32_t type; 5499 CHECK((*it)->findInt32("type", &type)); 5500 if (type == omx_message::FRAME_RENDERED) { 5501 receivedRenderedEvents = true; 5502 } 5503 } 5504 5505 if (receivedRenderedEvents) { 5506 // NOTE: all buffers are 
// Dispatches a single OMX callback message (event, empty-buffer-done,
// fill-buffer-done or frame-rendered) to the matching handler.
// Returns false for message types the base state does not recognize.
bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));

    switch (type) {
        case omx_message::EVENT:
        {
            int32_t event, data1, data2;
            CHECK(msg->findInt32("event", &event));
            CHECK(msg->findInt32("data1", &data1));
            CHECK(msg->findInt32("data2", &data2));

            if (event == OMX_EventCmdComplete
                    && data1 == OMX_CommandFlush
                    && data2 == (int32_t)OMX_ALL) {
                // Use of this notification is not consistent across
                // implementations. We'll drop this notification and rely
                // on flush-complete notifications on the individual port
                // indices instead.

                return true;
            }

            return onOMXEvent(
                    static_cast<OMX_EVENTTYPE>(event),
                    static_cast<OMX_U32>(data1),
                    static_cast<OMX_U32>(data2));
        }

        case omx_message::EMPTY_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            int32_t fenceFd;

            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXEmptyBufferDone(bufferID, fenceFd);
        }

        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));

            int32_t rangeOffset, rangeLength, flags, fenceFd;
            int64_t timeUs;

            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    fenceFd);
        }

        case omx_message::FRAME_RENDERED:
        {
            int64_t mediaTimeUs, systemNano;

            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
            CHECK(msg->findInt64("system_nano", &systemNano));

            return onOMXFrameRendered(
                    mediaTimeUs, systemNano);
        }

        default:
            ALOGE("Unexpected message type: %d", type);
            return false;
    }
}

// Base-state handler for frame-rendered events; derived states that care
// (Executing, PortSettingsChanged) override this.
bool ACodec::BaseState::onOMXFrameRendered(
        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
    // ignore outside of Executing and PortSettingsChanged states
    return true;
}

// Handles generic OMX events in the base state: dataspace changes are decoded
// (the aspects are bit-packed into data2) and forwarded to the codec; errors
// are sanitized and reported; everything else is left for derived states.
bool ACodec::BaseState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    if (event == OMX_EventDataSpaceChanged) {
        // data2 packs the aspects as (range << 24) | (primaries << 16)
        // | (matrix << 8) | transfer
        ColorAspects aspects;
        aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
        aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF);
        aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF);
        aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF);

        mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
        return true;
    }

    if (event != OMX_EventError) {
        ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
             mCodec->mComponentName.c_str(), event, data1, data2);

        // not handled here; let the current (derived) state handle it
        return false;
    }

    ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);

    // verify OMX component sends back an error we expect.
    OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
    if (!isOMXError(omxError)) {
        ALOGW("Invalid OMX error %#x", omxError);
        omxError = OMX_ErrorUndefined;
    }
    mCodec->signalError(omxError);

    return true;
}

// The component is done reading an input buffer: reclaim ownership, wait on
// the release fence (input buffers cannot carry fences onward), and either
// keep the buffer or immediately offer it to the client for refilling,
// depending on the current port mode.
bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
    ALOGV("[%s] onOMXEmptyBufferDone %u",
         mCodec->mComponentName.c_str(), bufferID);

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        // ownership mismatch -- drop the event, but close the fence so the fd
        // does not leak
        ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return false;
    }
    info->mStatus = BufferInfo::OWNED_BY_US;

    // input buffers cannot take fences, so wait for any fence now
    (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
    fenceFd = -1;

    // still save fence for completeness
    info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");

    // We're in "store-metadata-in-buffers" mode, the underlying
    // OMX component had access to data that's implicitly refcounted
    // by this "MediaBuffer" object. Now that the OMX component has
    // told us that it's done with the input buffer, we can decrement
    // the mediaBuffer's reference count.
    info->mData->setMediaBufferBase(NULL);

    PortMode mode = getPortMode(kPortIndexInput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
            postFillThisBuffer(info);
            break;

        case FREE_BUFFERS:
        default:
            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
            return false;
    }

    return true;
}
// Offers an input buffer we own to the client (MediaCodec) for filling, and
// transfers ownership upstream. No-op once the input port has seen EOS.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
    notify->setInt32("buffer-id", info->mBufferID);

    // clear stale metadata before handing the buffer out
    info->mData->meta()->clear();
    notify->setBuffer("buffer", info->mData);

    // the client answers via kWhatInputBufferFilled carrying this buffer id
    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
    reply->setInt32("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

// The client returned an input buffer. Depending on the current port mode the
// buffer is kept (flush), resubmitted to the component (steady state), or
// freed. Also handles client-signalled EOS/errors and the conversion/metadata
// plumbing needed before calling emptyBuffer on the component.
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<ABuffer> buffer;
    int32_t err = OK;
    bool eos = false;
    PortMode mode = getPortMode(kPortIndexInput);

    if (!msg->findBuffer("buffer", &buffer)) {
        /* these are unfilled buffers returned by client */
        CHECK(msg->findInt32("err", &err));

        if (err == OK) {
            /* buffers with no errors are returned on MediaCodec.flush */
            mode = KEEP_BUFFERS;
        } else {
            ALOGV("[%s] saw error %d instead of an input buffer",
                 mCodec->mComponentName.c_str(), err);
            eos = true;
        }

        buffer.clear();
    }

    // an explicit "eos" meta entry on the buffer also signals end of stream
    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // record EOS but keep the buffer with us (e.g. during flush)
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                MetadataBufferType metaType = mCodec->mInputMetadataType;
                int32_t isCSD = 0;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    if (mCodec->mIsLegacyVP9Decoder) {
                        // legacy VP9 decoders cannot accept codec config buffers
                        ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                        postFillThisBuffer(info);
                        break;
                    }
                    // codec config data is always sent as a plain (non-metadata) buffer
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                    metaType = kMetadataBufferTypeInvalid;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                if (buffer != info->mCodecData) {
                    // client buffer differs from the buffer handed to the
                    // codec; convert (or plain-copy) into the codec buffer
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                         mCodec->mComponentName.c_str(),
                         bufferID,
                         buffer.get(), info->mCodecData.get());

                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL || isCSD) {
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                         mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                         mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                         mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");

                // for metadata-mode input, refresh the handle/graphic buffer
                // stored in the metadata before handing it to the component
                status_t err2 = OK;
                switch (metaType) {
                case kMetadataBufferTypeInvalid:
                    break;
#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                case kMetadataBufferTypeNativeHandleSource:
                    if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
                        VideoNativeHandleMetadata *vnhmd =
                            (VideoNativeHandleMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateNativeHandleInMeta(
                                mCodec->mNode, kPortIndexInput,
                                NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */),
                                bufferID);
                    }
                    break;
                case kMetadataBufferTypeANWBuffer:
                    if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
                        VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMX->updateGraphicBufferInMeta(
                                mCodec->mNode, kPortIndexInput,
                                new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */),
                                bufferID);
                    }
                    break;
#endif
                default:
                    ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode",
                            asString(metaType), info->mCodecData->size(),
                            sizeof(buffer_handle_t) * 8);
                    err2 = ERROR_UNSUPPORTED;
                    break;
                }

                if (err2 == OK) {
                    err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        info->mCodecData->size(),
                        flags,
                        timeUs,
                        info->mFenceFd);
                }
                // the fence fd is consumed by emptyBuffer (or lost on error)
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                         mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // no buffer returned -- push an empty EOS buffer to the
                // component so it can drain
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                         mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                         mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                     mCodec->mComponentName.c_str(), bufferID);

                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        0,
                        OMX_BUFFERFLAG_EOS,
                        0,
                        info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// Finds an input buffer we currently own (if any) and offers it to the client
// for filling. No-op once input EOS has been seen.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}
            return;
        }
#endif

        // Only buffers we currently own are candidates for refilling.
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

// Handles the OMX FillBufferDone callback for one output buffer: reclaims
// ownership from the component, records the fence and filled range, and then
// either resubmits the buffer, frees it, or forwards it downstream depending
// on the output port mode.
//
// bufferID:    component-assigned id of the output buffer
// rangeOffset/rangeLength: valid data window within the buffer
// flags:       OMX buffer flags (notably OMX_BUFFERFLAG_EOS)
// timeUs:      presentation timestamp of the data
// fenceFd:     read fence for the buffer contents, or -1; ownership of the fd
//              is taken by this function on every path
//
// Returns true when the callback was consumed (including error paths that
// signal an error to the client); false only for an unrecognized port mode.
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
         mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;
    status_t err= OK;

#if TRACK_BUFFER_TIMING
    // Debug-only latency bookkeeping: match this FBD against the EBD recorded
    // for the same timestamp and log the fill latency.
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        // A FillBufferDone for a buffer the component does not own indicates a
        // broken component; report and signal a fatal error. The fence must
        // still be closed since we take ownership of it on every path.
        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return true;
    }

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus = BufferInfo::OWNED_BY_US;

    if (info->mRenderInfo != NULL) {
        // The fence for an emptied buffer must have signaled, but there still could be queued
        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
        // as we will soon requeue this buffer to the surface. While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            // e.g. while flushing/reconfiguring: just hold on to the buffer.
            break;

        case RESUBMIT_BUFFERS:
        {
            // An empty buffer that does not carry (a first) EOS conveys no
            // data to the client; hand it straight back to the component.
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                     mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            // The client will post this reply when it has drained the buffer.
            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) {
                // If potentially rendering onto a surface, always save key format data (crop &
                // data space) so that we can set it if and once the buffer is rendered.
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                // Encoder output in metadata mode: surface the native handle
                // and range via the buffer's meta rather than the payload.
                native_handle_t *handle = NULL;
                VideoNativeHandleMetadata &nativeMeta =
                    *(VideoNativeHandleMetadata *)info->mData->data();
                if (info->mData->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // handle is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pHandle;
#endif
                }
                info->mData->meta()->setPointer("handle", handle);
                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                info->mData->meta()->setInt32("rangeLength", rangeLength);
            } else if (info->mData == info->mCodecData) {
                info->mData->setRange(rangeOffset, rangeLength);
            } else {
                info->mCodecData->setRange(rangeOffset, rangeLength);
                // in this case we know that mConverter is not null
                status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                        info->mCodecData, info->mData);
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                mCodec->mSkipCutBuffer->submit(info->mData);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);

            // Hand the buffer to the client (MediaCodec) together with the
            // reply message it must post back when done.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setBuffer("buffer", info->mData);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                // First output EOS: tell the client, carrying any error that
                // was reported alongside input EOS.
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        case FREE_BUFFERS:
            // Port is being disabled/torn down: release the buffer instead of
            // recycling it.
            err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return true;
            }
            break;

        default:
            ALOGE("Invalid port mode: %d", mode);
            return false;
    }

    return true;
}

// Handles the client's reply that an output buffer has been drained: applies
// any deferred crop/dataspace changes, optionally renders the buffer to the
// native window, then recycles or frees it according to the port mode.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        // Only buffers previously handed to the client may be drained.
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    // Apply a crop change saved at FillBufferDone time, but only if it
    // actually differs from what the window was last given.
    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
            && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
        mCodec->mLastNativeWindowCrop = crop;
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    // Likewise apply a deferred dataspace change.
    int32_t dataSpace;
    if (msg->findInt32("dataspace", &dataSpace)
            && dataSpace != mCodec->mLastNativeWindowDataSpace) {
        status_t err = native_window_set_buffers_data_space(
                mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
        // NOTE(review): the cache is updated even when the window call failed,
        // so a failed set will not be retried for the same dataspace — confirm
        // this is intentional.
        mCodec->mLastNativeWindowDataSpace = dataSpace;
        ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
    }

    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0
            && info->mData != NULL && info->mData->size() != 0) {
        ATRACE_NAME("render");
        // The client wants this buffer to be rendered.

        // save buffers sent to the surface so we can get render time when they return
        int64_t mediaTimeUs = -1;
        info->mData->meta()->findInt64("timeUs", &mediaTimeUs);
        if (mediaTimeUs >= 0) {
            mCodec->mRenderTracker.onFrameQueued(
                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
        }

        int64_t timestampNs = 0;
        if (!msg->findInt64("timestampNs", &timestampNs)) {
            // use media timestamp if client did not request a specific render timestamp
            if (info->mData->meta()->findInt64("timeUs", &timestampNs)) {
                ALOGV("using buffer PTS of %lld", (long long)timestampNs);
                timestampNs *= 1000;
            }
        }

        status_t err;
        err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
        ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);

        info->checkReadFence("onOutputBufferDrained before queueBuffer");
        // queueBuffer takes over the fence fd regardless of outcome.
        err = mCodec->mNativeWindow->queueBuffer(
                mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
        info->mFenceFd = -1;
        if (err == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            info->mStatus = BufferInfo::OWNED_BY_US;
            // keeping read fence as write fence to avoid clobbering
            info->mIsReadFence = false;
        }
    } else {
        if (mCodec->mNativeWindow != NULL &&
            (info->mData == NULL || info->mData->size() != 0)) {
            // move read fence into write fence to avoid clobbering
            info->mIsReadFence = false;
            ATRACE_NAME("frame-drop");
        }
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                         mCodec->mComponentName.c_str(), info->mBufferID);
                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                    status_t err = mCodec->mOMX->fillBuffer(
                            mCodec->mNode, info->mBufferID, info->mFenceFd);
                    info->mFenceFd = -1;
                    if (err == OK) {
                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                    } else {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                }
            }
            break;
        }

        case FREE_BUFFERS:
        {
            status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            }
            break;
        }

        default:
            ALOGE("Invalid port mode: %d", mode);
            return;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}

// Entered when no component is allocated (initially and after freeNode):
// severs the OMX death link and resets all per-component state.
void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    if (mDeathNotifier != NULL) {
        mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier);
        mDeathNotifier.clear();
    }

    mCodec->mUsingNativeWindow = false;
    mCodec->mNativeWindow.clear();
    mCodec->mNativeWindowUsageBits = 0;
    mCodec->mNode = 0;
    mCodec->mOMX.clear();
    mCodec->mQuirks = 0;
    mCodec->mFlags = 0;
    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mConverter[0].clear();
    mCodec->mConverter[1].clear();
    mCodec->mComponentName.clear();
}

bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }

        case ACodec::kWhatAllocateComponent:
        {
            onAllocateComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            // Nothing is allocated; acknowledge shutdown immediately.
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));
            ALOGW_IF(keepComponentAllocated,
                     "cannot keep component allocated on shutdown in Uninitialized state");

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // Nothing to flush before allocation; report completion.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatReleaseCodecInstance:
        {
            // nothing to do, as we have already signaled shutdown
            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Convenience path: allocate + configure + start in one message.
void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    if (onAllocateComponent(msg)
            && mCodec->mLoadedState->onConfigureComponent(msg)) {
        mCodec->mLoadedState->onStart();
    }
}

// Allocates an OMX node either by explicit component name or by matching the
// given mime/encoder request against the codec list. On success transitions
// to the Loaded state; on failure signals an error to the client.
// Returns true iff a node was allocated.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mNode == 0);

    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    sp<IOMX> omx = client.interface();

    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

    Vector<AString> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        // Explicit component request: use it only if the codec list knows it.
        sp<IMediaCodecList> list = MediaCodecList::getInstance();
        if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
            matchingCodecs.add(componentName);
        }
    } else {
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        MediaCodecList::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                0,       // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    IOMX::node_id node = 0;

    status_t err = NAME_NOT_FOUND;
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs[matchIndex];
        quirks = MediaCodecList::getQuirksFor(componentName.c_str());

        // Temporarily boost priority so node allocation is not starved.
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        node = 0;
    }

    if (node == 0) {
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                    encoder ? "en" : "de", mime.c_str(), err);
        } else {
            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
        }

        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
        return false;
    }

    mDeathNotifier = new DeathNotifier(notify);
    if (mCodec->mNodeBinder == NULL ||
            mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    notify = new AMessage(kWhatOMXMessageList, mCodec);
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mRenderTracker.setComponentName(componentName);
    mCodec->mFlags = 0;

    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    mCodec->mQuirks = quirks;
    mCodec->mOMX = omx;
    mCodec->mNode = node;

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedState::LoadedState(ACodec *codec)
    : BaseState(codec) {
}

// Entered when a component exists but holds no buffers: resets per-session
// state and completes any shutdown that was deferred while leaving Executing.
void ACodec::LoadedState::stateEntered() {
    ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetadataBuffersToSubmit = 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();

    if (mCodec->mShutdownInProgress) {
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;

    mCodec->processDeferredMessages();
}

// Frees the OMX node (unless the client asked to keep it) and acknowledges an
// explicit shutdown request.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        (void)mCodec->mOMX->freeNode(mCodec->mNode);

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatSetInputSurface:
        {
            onSetInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // No buffers in flight in Loaded; report flush as done.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Configures the component from the client's format message and reports the
// negotiated input/output formats back. Returns true on success.
bool ACodec::LoadedState::onConfigureComponent(
        const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mNode != 0);

    status_t err = OK;
    AString mime;
    if (!msg->findString("mime", &mime)) {
        err = BAD_VALUE;
    } else {
        err = mCodec->configureCodec(mime.c_str(), msg);
    }
    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d",
              mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentConfigured);
        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
        notify->post();
    }

    return true;
}

// Pushes all encoder-surface related options (frame repeat, timestamp gap,
// max fps, time lapse, initial suspend, color aspects) that were cached during
// configure down to the component. Returns the first failing status, or OK.
status_t ACodec::LoadedState::setupInputSurface() {
    status_t err = OK;

    if (mCodec->mRepeatFrameDelayUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY,
                &mCodec->mRepeatFrameDelayUs,
                sizeof(mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxPtsGapUs > 0ll) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP,
                &mCodec->mMaxPtsGapUs,
                sizeof(mCodec->mMaxPtsGapUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_MAX_FPS,
                &mCodec->mMaxFps,
                sizeof(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        // Time-lapse/slow-motion capture: pass (per-frame, per-capture) pair.
        int64_t timeLapse[2];
        timeLapse[0] = mCodec->mTimePerFrameUs;
        timeLapse[1] = mCodec->mTimePerCaptureUs;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_TIME_LAPSE,
                &timeLapse[0],
                sizeof(timeLapse));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        bool suspend = true;
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode,
                kPortIndexInput,
                IOMX::INTERNAL_OPTION_SUSPEND,
                &suspend,
                sizeof(suspend));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(),
                  err);
            return err;
        }
    }

    // Best-effort: expose whether the consumer reads with sw-read-often usage.
    uint32_t usageBits;
    if (mCodec->mOMX->getParameter(
            mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    sp<ABuffer> colorAspectsBuffer;
    if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        err = mCodec->mOMX->setInternalOption(
                mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS,
                colorAspectsBuffer->base(), colorAspectsBuffer->capacity());
        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}

// Creates a new encoder input surface on the component and hands its producer
// side to the client via kWhatInputSurfaceCreated (carrying "err" on failure).
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    sp<IGraphicBufferProducer> bufferProducer;
    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        err = mCodec->mOMX->createInputSurface(
                mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer,
                &mCodec->mInputMetadataType);
        // framework uses ANW buffers internally instead of gralloc handles
        if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        }
    }

    if (err == OK) {
        err = setupInputSurface();
    }

    if (err == OK) {
        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Attaches a client-provided persistent input surface to the encoder and
// acknowledges via kWhatInputSurfaceAccepted (carrying "err" on failure).
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    notify->setMessage("input-format", mCodec->mInputFormat);
    notify->setMessage("output-format", mCodec->mOutputFormat);

    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        err = mCodec->mOMX->setInputSurface(
                mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(),
                &mCodec->mInputMetadataType);
        // framework uses ANW buffers internally instead of gralloc handles
        if (mCodec->mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;
        }
    }

    if (err == OK) {
        // Propagate the negotiated dataspace onto the persistent surface's
        // consumer before applying the remaining surface options.
        surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace);
        err = setupInputSurface();
    }

    if (err != OK) {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Kicks off the Loaded -> Idle OMX transition; buffers are allocated once the
// LoadedToIdle state is entered.
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// Entered while waiting for the component to reach Idle: allocates buffers on
// both ports. On allocation failure, rolls the component back to Loaded and
// frees whatever was allocated.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
              "(error 0x%08x)",
              err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates buffers on the input port first, then the output port.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Defer until the state transition completes.
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Waits for the Idle transition to complete, then requests Executing and
// moves to the IdleToExecuting state.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMX->sendCommand(
                        mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Defer until the state transition completes.
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Waits for the Executing transition to complete, then resumes buffer flow
// and enters the Executing state.
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

// While executing, buffers on both ports flow normally.
ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    return RESUBMIT_BUFFERS;
}

void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Hands every output buffer we (or no one) currently hold back to the
// component via fillBuffer; buffers held by the surface are skipped. Any
// unexpected ownership or fillBuffer failure raises FAILED_TRANSACTION.
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        // fillBuffer consumes the fence fd whether or not it succeeds.
        status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
        info->mFenceFd = -1;
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

// (Re)starts buffer flow after entering Executing or after a flush: submits
// all output buffers to the component and asks the client to fill every input
// buffer we hold. No-op if already active.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }

submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

// Entry hook for the Executing state: reset render-timing bookkeeping and
// replay any messages that were deferred while transitioning.
void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

// Message dispatch while the component is in the OMX Executing state.
// Handles shutdown, flush, resume, parameter updates, IDR requests and EOS
// signalling; everything else falls through to BaseState.
bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            // Begin the Executing -> Idle transition; completion is observed
            // in ExecutingToIdleState via OMX_EventCmdComplete.
            status_t err = mCodec->mOMX->sendCommand(
                    mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                // Best-effort: a failed IDR request is logged but not fatal.
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply with the result only if the sender asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            // Keep feeding output metadata buffers until the output side
            // also reaches EOS.
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
7247 } 7248 return true; 7249 } 7250 7251 default: 7252 handled = BaseState::onMessageReceived(msg); 7253 break; 7254 } 7255 7256 return handled; 7257} 7258 7259status_t ACodec::setParameters(const sp<AMessage> ¶ms) { 7260 int32_t videoBitrate; 7261 if (params->findInt32("video-bitrate", &videoBitrate)) { 7262 OMX_VIDEO_CONFIG_BITRATETYPE configParams; 7263 InitOMXParams(&configParams); 7264 configParams.nPortIndex = kPortIndexOutput; 7265 configParams.nEncodeBitrate = videoBitrate; 7266 7267 status_t err = mOMX->setConfig( 7268 mNode, 7269 OMX_IndexConfigVideoBitrate, 7270 &configParams, 7271 sizeof(configParams)); 7272 7273 if (err != OK) { 7274 ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d", 7275 videoBitrate, err); 7276 7277 return err; 7278 } 7279 } 7280 7281 int64_t skipFramesBeforeUs; 7282 if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) { 7283 status_t err = 7284 mOMX->setInternalOption( 7285 mNode, 7286 kPortIndexInput, 7287 IOMX::INTERNAL_OPTION_START_TIME, 7288 &skipFramesBeforeUs, 7289 sizeof(skipFramesBeforeUs)); 7290 7291 if (err != OK) { 7292 ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err); 7293 return err; 7294 } 7295 } 7296 7297 int32_t dropInputFrames; 7298 if (params->findInt32("drop-input-frames", &dropInputFrames)) { 7299 bool suspend = dropInputFrames != 0; 7300 7301 status_t err = 7302 mOMX->setInternalOption( 7303 mNode, 7304 kPortIndexInput, 7305 IOMX::INTERNAL_OPTION_SUSPEND, 7306 &suspend, 7307 sizeof(suspend)); 7308 7309 if (err != OK) { 7310 ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err); 7311 return err; 7312 } 7313 } 7314 7315 int32_t dummy; 7316 if (params->findInt32("request-sync", &dummy)) { 7317 status_t err = requestIDRFrame(); 7318 7319 if (err != OK) { 7320 ALOGE("Requesting a sync frame failed w/ err %d", err); 7321 return err; 7322 } 7323 } 7324 7325 float rate; 7326 if (params->findFloat("operating-rate", &rate) && rate > 0) { 7327 status_t err = 
setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            // Deliberately non-fatal: this key is optional.
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    return OK;
}

// Forwards an end-of-input-stream signal to the component and notifies the
// client (kWhatSignaledInputEOS), attaching "err" only on failure.
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = mOMX->signalEndOfInputStream(mNode);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

// Relays frame-rendered callbacks to the codec's render tracker.
bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// OMX event dispatch while executing. A port-settings change on the output
// port's definition triggers the full port-disable/reallocate dance via
// OutputPortSettingsChangedState; crop/intra-refresh-only changes are already
// handled by onOutputFormatChanged above.
bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                mCodec->mMetadataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode,
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                        mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// While reconfiguring: drained output buffers are freed (the port is being
// torn down); input buffers keep flowing back to the component.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

// Defers flush/shutdown/resume/setParameters until the port reconfiguration
// completes; deferred messages are replayed when the next state is entered.
bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

// Render notifications still flow while the output port is reconfiguring.
bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Drives the disable -> reallocate -> enable sequence for the output port.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] 
Output port now disabled.", mCodec->mComponentName.c_str());

                // By now every output buffer should have been returned and
                // freed; a non-empty list means the teardown is incomplete.
                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from executing->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                // Resubmit output buffers only if we were actively streaming
                // before the reconfiguration.
                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

// During the Executing -> Idle transition, only the in-progress shutdown is
// acknowledged; flushes are ignored (the caller already asked us to stop).
bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

// Waits for the component to confirm the Idle transition; port-settings and
// buffer-flag events are irrelevant mid-shutdown and are swallowed.
bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component is idle AND every buffer has been returned to us, free
// all buffers and start the Idle -> Loaded transition.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            // Free both ports even if the first free fails; report the first
            // error encountered.
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            if (err == OK) {
                err = err2;
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display. This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

// Buffer-return hooks: each returned buffer may be the last one we were
// waiting for, so re-check the transition condition after the base handling.
void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

// During the Idle -> Loaded transition only shutdown (already in progress)
// is acknowledged; a flush here is a client protocol error.
bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGE("Got flush request in IdleToLoadedState");
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::IdleToLoadedState::stateEntered() {
    ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
}

// Completes the shutdown once the component confirms the Loaded state.
bool ACodec::IdleToLoadedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateLoaded) {
                ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->changeState(mCodec->mLoadedState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::FlushingState::FlushingState(ACodec *codec)
    : BaseState(codec) {
}

// A flush is complete only when BOTH ports have reported completion.
void ACodec::FlushingState::stateEntered() {
    ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());

    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
}

// Shutdown requests arriving mid-flush are deferred until the flush is done;
// a duplicate flush is a harmless no-op.
bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            break;
        }

        case kWhatFlush:
        {
            // We're already doing this right now.
7736 handled = true; 7737 break; 7738 } 7739 7740 default: 7741 handled = BaseState::onMessageReceived(msg); 7742 break; 7743 } 7744 7745 return handled; 7746} 7747 7748bool ACodec::FlushingState::onOMXEvent( 7749 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7750 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7751 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7752 7753 switch (event) { 7754 case OMX_EventCmdComplete: 7755 { 7756 if (data1 != (OMX_U32)OMX_CommandFlush) { 7757 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7758 asString((OMX_COMMANDTYPE)data1), data1, data2); 7759 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7760 return true; 7761 } 7762 7763 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7764 if (mFlushComplete[data2]) { 7765 ALOGW("Flush already completed for %s port", 7766 data2 == kPortIndexInput ? "input" : "output"); 7767 return true; 7768 } 7769 mFlushComplete[data2] = true; 7770 7771 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7772 changeStateIfWeOwnAllBuffers(); 7773 } 7774 } else if (data2 == OMX_ALL) { 7775 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7776 ALOGW("received flush complete event for OMX_ALL before ports have been" 7777 "flushed (%d/%d)", 7778 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7779 return false; 7780 } 7781 7782 changeStateIfWeOwnAllBuffers(); 7783 } else { 7784 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7785 } 7786 7787 return true; 7788 } 7789 7790 case OMX_EventPortSettingsChanged: 7791 { 7792 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7793 msg->setInt32("type", omx_message::EVENT); 7794 msg->setInt32("node", mCodec->mNode); 7795 msg->setInt32("event", event); 7796 msg->setInt32("data1", data1); 7797 msg->setInt32("data2", data2); 7798 7799 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7800 
mCodec->mComponentName.c_str()); 7801 7802 mCodec->deferMessage(msg); 7803 7804 return true; 7805 } 7806 7807 default: 7808 return BaseState::onOMXEvent(event, data1, data2); 7809 } 7810 7811 return true; 7812} 7813 7814void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7815 BaseState::onOutputBufferDrained(msg); 7816 7817 changeStateIfWeOwnAllBuffers(); 7818} 7819 7820void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7821 BaseState::onInputBufferFilled(msg); 7822 7823 changeStateIfWeOwnAllBuffers(); 7824} 7825 7826void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7827 if (mFlushComplete[kPortIndexInput] 7828 && mFlushComplete[kPortIndexOutput] 7829 && mCodec->allYourBuffersAreBelongToUs()) { 7830 // We now own all buffers except possibly those still queued with 7831 // the native window for rendering. Let's get those back as well. 7832 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7833 7834 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7835 7836 sp<AMessage> notify = mCodec->mNotify->dup(); 7837 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7838 notify->post(); 7839 7840 mCodec->mPortEOS[kPortIndexInput] = 7841 mCodec->mPortEOS[kPortIndexOutput] = false; 7842 7843 mCodec->mInputEOSResult = OK; 7844 7845 if (mCodec->mSkipCutBuffer != NULL) { 7846 mCodec->mSkipCutBuffer->clear(); 7847 } 7848 7849 mCodec->changeState(mCodec->mExecutingState); 7850 } 7851} 7852 7853status_t ACodec::queryCapabilities( 7854 const AString &name, const AString &mime, bool isEncoder, 7855 sp<MediaCodecInfo::Capabilities> *caps) { 7856 (*caps).clear(); 7857 const char *role = getComponentRole(isEncoder, mime.c_str()); 7858 if (role == NULL) { 7859 return BAD_VALUE; 7860 } 7861 7862 OMXClient client; 7863 status_t err = client.connect(); 7864 if (err != OK) { 7865 return err; 7866 } 7867 7868 sp<IOMX> omx = client.interface(); 7869 sp<CodecObserver> observer = new CodecObserver; 7870 
    IOMX::node_id node = 0;

    err = omx->allocateNode(name.c_str(), observer, NULL, &node);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = setComponentRole(omx, node, role);
    if (err != OK) {
        omx->freeNode(node);
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        // Enumerate supported profile/level pairs until the component stops
        // answering (capped at kMaxIndicesToCheck).
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omx->getParameter(
                    node, OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            OMX_U32 flexibleEquivalent;
            if (isFlexibleColorFormat(
                    omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                // Emit the flexible equivalent once, ahead of the concrete
                // format that maps to it.
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omx->getParameter(
                    node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        if (param.nProfileIndex == 0) {
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        native_handle_t *sidebandHandle = NULL;
        if (omx->configureVideoTunnelMode(
                node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omx->storeMetaDataInBuffers(
                node, kPortIndexOutput, OMX_TRUE) == OK ||
            omx->prepareForAdaptivePlayback(
                node, kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
        if (omx->getConfig(
                node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omx->freeNode(node);
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
8017//static 8018status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 8019 switch (numChannels) { 8020 case 1: 8021 map[0] = OMX_AUDIO_ChannelCF; 8022 break; 8023 case 2: 8024 map[0] = OMX_AUDIO_ChannelLF; 8025 map[1] = OMX_AUDIO_ChannelRF; 8026 break; 8027 case 3: 8028 map[0] = OMX_AUDIO_ChannelLF; 8029 map[1] = OMX_AUDIO_ChannelRF; 8030 map[2] = OMX_AUDIO_ChannelCF; 8031 break; 8032 case 4: 8033 map[0] = OMX_AUDIO_ChannelLF; 8034 map[1] = OMX_AUDIO_ChannelRF; 8035 map[2] = OMX_AUDIO_ChannelLR; 8036 map[3] = OMX_AUDIO_ChannelRR; 8037 break; 8038 case 5: 8039 map[0] = OMX_AUDIO_ChannelLF; 8040 map[1] = OMX_AUDIO_ChannelRF; 8041 map[2] = OMX_AUDIO_ChannelCF; 8042 map[3] = OMX_AUDIO_ChannelLR; 8043 map[4] = OMX_AUDIO_ChannelRR; 8044 break; 8045 case 6: 8046 map[0] = OMX_AUDIO_ChannelLF; 8047 map[1] = OMX_AUDIO_ChannelRF; 8048 map[2] = OMX_AUDIO_ChannelCF; 8049 map[3] = OMX_AUDIO_ChannelLFE; 8050 map[4] = OMX_AUDIO_ChannelLR; 8051 map[5] = OMX_AUDIO_ChannelRR; 8052 break; 8053 case 7: 8054 map[0] = OMX_AUDIO_ChannelLF; 8055 map[1] = OMX_AUDIO_ChannelRF; 8056 map[2] = OMX_AUDIO_ChannelCF; 8057 map[3] = OMX_AUDIO_ChannelLFE; 8058 map[4] = OMX_AUDIO_ChannelLR; 8059 map[5] = OMX_AUDIO_ChannelRR; 8060 map[6] = OMX_AUDIO_ChannelCS; 8061 break; 8062 case 8: 8063 map[0] = OMX_AUDIO_ChannelLF; 8064 map[1] = OMX_AUDIO_ChannelRF; 8065 map[2] = OMX_AUDIO_ChannelCF; 8066 map[3] = OMX_AUDIO_ChannelLFE; 8067 map[4] = OMX_AUDIO_ChannelLR; 8068 map[5] = OMX_AUDIO_ChannelRR; 8069 map[6] = OMX_AUDIO_ChannelLS; 8070 map[7] = OMX_AUDIO_ChannelRS; 8071 break; 8072 default: 8073 return -EINVAL; 8074 } 8075 8076 return OK; 8077} 8078 8079} // namespace android 8080