ACodec.cpp revision ecc97eb44a0675974fcf43b0c68edaaa539d2996
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "include/SharedMemoryBuffer.h" 57#include "omx/OMXUtils.h" 58 59namespace android { 60 61using binder::Status; 62 63enum { 64 kMaxIndicesToCheck = 32, // used when enumerating supported 
formats and profiles 65}; 66 67// OMX errors are directly mapped into status_t range if 68// there is no corresponding MediaError status code. 69// Use the statusFromOMXError(int32_t omxError) function. 70// 71// Currently this is a direct map. 72// See frameworks/native/include/media/openmax/OMX_Core.h 73// 74// Vendor OMX errors from 0x90000000 - 0x9000FFFF 75// Extension OMX errors from 0x8F000000 - 0x90000000 76// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 77// 78 79// returns true if err is a recognized OMX error code. 80// as OMX error is OMX_S32, this is an int32_t type 81static inline bool isOMXError(int32_t err) { 82 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 83} 84 85// converts an OMX error to a status_t 86static inline status_t statusFromOMXError(int32_t omxError) { 87 switch (omxError) { 88 case OMX_ErrorInvalidComponentName: 89 case OMX_ErrorComponentNotFound: 90 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 91 default: 92 return isOMXError(omxError) ? omxError : 0; // no translation required 93 } 94} 95 96static inline status_t statusFromBinderStatus(const Status &status) { 97 if (status.isOk()) { 98 return OK; 99 } 100 status_t err; 101 if ((err = status.serviceSpecificErrorCode()) != OK) { 102 return err; 103 } 104 if ((err = status.transactionError()) != OK) { 105 return err; 106 } 107 // Other exception 108 return UNKNOWN_ERROR; 109} 110 111// checks and converts status_t to a non-side-effect status_t 112static inline status_t makeNoSideEffectStatus(status_t err) { 113 switch (err) { 114 // the following errors have side effects and may come 115 // from other code modules. Remap for safety reasons. 
116 case INVALID_OPERATION: 117 case DEAD_OBJECT: 118 return UNKNOWN_ERROR; 119 default: 120 return err; 121 } 122} 123 124struct MessageList : public RefBase { 125 MessageList() { 126 } 127 virtual ~MessageList() { 128 } 129 std::list<sp<AMessage> > &getList() { return mList; } 130private: 131 std::list<sp<AMessage> > mList; 132 133 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 134}; 135 136static sp<DataConverter> getCopyConverter() { 137 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited 138 static sp<DataConverter> sCopyConverter; // zero-inited 139 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); }); 140 return sCopyConverter; 141} 142 143struct CodecObserver : public BnOMXObserver { 144 CodecObserver() {} 145 146 void setNotificationMessage(const sp<AMessage> &msg) { 147 mNotify = msg; 148 } 149 150 // from IOMXObserver 151 virtual void onMessages(const std::list<omx_message> &messages) { 152 if (messages.empty()) { 153 return; 154 } 155 156 sp<AMessage> notify = mNotify->dup(); 157 sp<MessageList> msgList = new MessageList(); 158 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 159 it != messages.cend(); ++it) { 160 const omx_message &omx_msg = *it; 161 162 sp<AMessage> msg = new AMessage; 163 msg->setInt32("type", omx_msg.type); 164 switch (omx_msg.type) { 165 case omx_message::EVENT: 166 { 167 msg->setInt32("event", omx_msg.u.event_data.event); 168 msg->setInt32("data1", omx_msg.u.event_data.data1); 169 msg->setInt32("data2", omx_msg.u.event_data.data2); 170 break; 171 } 172 173 case omx_message::EMPTY_BUFFER_DONE: 174 { 175 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 176 msg->setInt32("fence_fd", omx_msg.fenceFd); 177 break; 178 } 179 180 case omx_message::FILL_BUFFER_DONE: 181 { 182 msg->setInt32( 183 "buffer", omx_msg.u.extended_buffer_data.buffer); 184 msg->setInt32( 185 "range_offset", 186 omx_msg.u.extended_buffer_data.range_offset); 187 msg->setInt32( 188 "range_length", 189 
omx_msg.u.extended_buffer_data.range_length); 190 msg->setInt32( 191 "flags", 192 omx_msg.u.extended_buffer_data.flags); 193 msg->setInt64( 194 "timestamp", 195 omx_msg.u.extended_buffer_data.timestamp); 196 msg->setInt32( 197 "fence_fd", omx_msg.fenceFd); 198 break; 199 } 200 201 case omx_message::FRAME_RENDERED: 202 { 203 msg->setInt64( 204 "media_time_us", omx_msg.u.render_data.timestamp); 205 msg->setInt64( 206 "system_nano", omx_msg.u.render_data.nanoTime); 207 break; 208 } 209 210 default: 211 ALOGE("Unrecognized message type: %d", omx_msg.type); 212 break; 213 } 214 msgList->getList().push_back(msg); 215 } 216 notify->setObject("messages", msgList); 217 notify->post(); 218 } 219 220protected: 221 virtual ~CodecObserver() {} 222 223private: 224 sp<AMessage> mNotify; 225 226 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); 227}; 228 229//////////////////////////////////////////////////////////////////////////////// 230 231struct ACodec::BaseState : public AState { 232 explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL); 233 234protected: 235 enum PortMode { 236 KEEP_BUFFERS, 237 RESUBMIT_BUFFERS, 238 FREE_BUFFERS, 239 }; 240 241 ACodec *mCodec; 242 243 virtual PortMode getPortMode(OMX_U32 portIndex); 244 245 virtual bool onMessageReceived(const sp<AMessage> &msg); 246 247 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 248 249 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 250 virtual void onInputBufferFilled(const sp<AMessage> &msg); 251 252 void postFillThisBuffer(BufferInfo *info); 253 254private: 255 // Handles an OMX message. Returns true iff message was handled. 256 bool onOMXMessage(const sp<AMessage> &msg); 257 258 // Handles a list of messages. Returns true iff messages were handled. 
259 bool onOMXMessageList(const sp<AMessage> &msg); 260 261 // returns true iff this message is for this component and the component is alive 262 bool checkOMXMessage(const sp<AMessage> &msg); 263 264 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd); 265 266 bool onOMXFillBufferDone( 267 IOMX::buffer_id bufferID, 268 size_t rangeOffset, size_t rangeLength, 269 OMX_U32 flags, 270 int64_t timeUs, 271 int fenceFd); 272 273 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 274 275 void getMoreInputDataIfPossible(); 276 277 DISALLOW_EVIL_CONSTRUCTORS(BaseState); 278}; 279 280//////////////////////////////////////////////////////////////////////////////// 281 282struct ACodec::DeathNotifier : public IBinder::DeathRecipient { 283 explicit DeathNotifier(const sp<AMessage> ¬ify) 284 : mNotify(notify) { 285 } 286 287 virtual void binderDied(const wp<IBinder> &) { 288 mNotify->post(); 289 } 290 291protected: 292 virtual ~DeathNotifier() {} 293 294private: 295 sp<AMessage> mNotify; 296 297 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); 298}; 299 300struct ACodec::UninitializedState : public ACodec::BaseState { 301 explicit UninitializedState(ACodec *codec); 302 303protected: 304 virtual bool onMessageReceived(const sp<AMessage> &msg); 305 virtual void stateEntered(); 306 307private: 308 void onSetup(const sp<AMessage> &msg); 309 bool onAllocateComponent(const sp<AMessage> &msg); 310 311 sp<DeathNotifier> mDeathNotifier; 312 313 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); 314}; 315 316//////////////////////////////////////////////////////////////////////////////// 317 318struct ACodec::LoadedState : public ACodec::BaseState { 319 explicit LoadedState(ACodec *codec); 320 321protected: 322 virtual bool onMessageReceived(const sp<AMessage> &msg); 323 virtual void stateEntered(); 324 325private: 326 friend struct ACodec::UninitializedState; 327 328 bool onConfigureComponent(const sp<AMessage> &msg); 329 void onCreateInputSurface(const 
sp<AMessage> &msg); 330 void onSetInputSurface(const sp<AMessage> &msg); 331 void onStart(); 332 void onShutdown(bool keepComponentAllocated); 333 334 status_t setupInputSurface(); 335 336 DISALLOW_EVIL_CONSTRUCTORS(LoadedState); 337}; 338 339//////////////////////////////////////////////////////////////////////////////// 340 341struct ACodec::LoadedToIdleState : public ACodec::BaseState { 342 explicit LoadedToIdleState(ACodec *codec); 343 344protected: 345 virtual bool onMessageReceived(const sp<AMessage> &msg); 346 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 347 virtual void stateEntered(); 348 349private: 350 status_t allocateBuffers(); 351 352 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); 353}; 354 355//////////////////////////////////////////////////////////////////////////////// 356 357struct ACodec::IdleToExecutingState : public ACodec::BaseState { 358 explicit IdleToExecutingState(ACodec *codec); 359 360protected: 361 virtual bool onMessageReceived(const sp<AMessage> &msg); 362 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 363 virtual void stateEntered(); 364 365private: 366 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); 367}; 368 369//////////////////////////////////////////////////////////////////////////////// 370 371struct ACodec::ExecutingState : public ACodec::BaseState { 372 explicit ExecutingState(ACodec *codec); 373 374 void submitRegularOutputBuffers(); 375 void submitOutputMetaBuffers(); 376 void submitOutputBuffers(); 377 378 // Submit output buffers to the decoder, submit input buffers to client 379 // to fill with data. 380 void resume(); 381 382 // Returns true iff input and output buffers are in play. 
383 bool active() const { return mActive; } 384 385protected: 386 virtual PortMode getPortMode(OMX_U32 portIndex); 387 virtual bool onMessageReceived(const sp<AMessage> &msg); 388 virtual void stateEntered(); 389 390 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 391 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 392 393private: 394 bool mActive; 395 396 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); 397}; 398 399//////////////////////////////////////////////////////////////////////////////// 400 401struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { 402 explicit OutputPortSettingsChangedState(ACodec *codec); 403 404protected: 405 virtual PortMode getPortMode(OMX_U32 portIndex); 406 virtual bool onMessageReceived(const sp<AMessage> &msg); 407 virtual void stateEntered(); 408 409 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 410 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 411 412private: 413 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); 414}; 415 416//////////////////////////////////////////////////////////////////////////////// 417 418struct ACodec::ExecutingToIdleState : public ACodec::BaseState { 419 explicit ExecutingToIdleState(ACodec *codec); 420 421protected: 422 virtual bool onMessageReceived(const sp<AMessage> &msg); 423 virtual void stateEntered(); 424 425 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 426 427 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 428 virtual void onInputBufferFilled(const sp<AMessage> &msg); 429 430private: 431 void changeStateIfWeOwnAllBuffers(); 432 433 bool mComponentNowIdle; 434 435 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); 436}; 437 438//////////////////////////////////////////////////////////////////////////////// 439 440struct ACodec::IdleToLoadedState : public ACodec::BaseState { 441 explicit IdleToLoadedState(ACodec 
*codec); 442 443protected: 444 virtual bool onMessageReceived(const sp<AMessage> &msg); 445 virtual void stateEntered(); 446 447 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 448 449private: 450 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); 451}; 452 453//////////////////////////////////////////////////////////////////////////////// 454 455struct ACodec::FlushingState : public ACodec::BaseState { 456 explicit FlushingState(ACodec *codec); 457 458protected: 459 virtual bool onMessageReceived(const sp<AMessage> &msg); 460 virtual void stateEntered(); 461 462 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 463 464 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 465 virtual void onInputBufferFilled(const sp<AMessage> &msg); 466 467private: 468 bool mFlushComplete[2]; 469 470 void changeStateIfWeOwnAllBuffers(); 471 472 DISALLOW_EVIL_CONSTRUCTORS(FlushingState); 473}; 474 475//////////////////////////////////////////////////////////////////////////////// 476 477void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) { 478 if (mFenceFd >= 0) { 479 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", 480 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 481 } 482 mFenceFd = fenceFd; 483 mIsReadFence = false; 484} 485 486void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { 487 if (mFenceFd >= 0) { 488 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", 489 mIsReadFence ? 
"read" : "write", mFenceFd, fenceFd, dbg); 490 } 491 mFenceFd = fenceFd; 492 mIsReadFence = true; 493} 494 495void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 496 if (mFenceFd >= 0 && mIsReadFence) { 497 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 498 } 499} 500 501void ACodec::BufferInfo::checkReadFence(const char *dbg) { 502 if (mFenceFd >= 0 && !mIsReadFence) { 503 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 504 } 505} 506 507//////////////////////////////////////////////////////////////////////////////// 508 509ACodec::ACodec() 510 : mSampleRate(0), 511 mNodeGeneration(0), 512 mUsingNativeWindow(false), 513 mNativeWindowUsageBits(0), 514 mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN), 515 mIsVideo(false), 516 mIsEncoder(false), 517 mFatalError(false), 518 mShutdownInProgress(false), 519 mExplicitShutdown(false), 520 mIsLegacyVP9Decoder(false), 521 mEncoderDelay(0), 522 mEncoderPadding(0), 523 mRotationDegrees(0), 524 mChannelMaskPresent(false), 525 mChannelMask(0), 526 mDequeueCounter(0), 527 mInputMetadataType(kMetadataBufferTypeInvalid), 528 mOutputMetadataType(kMetadataBufferTypeInvalid), 529 mLegacyAdaptiveExperiment(false), 530 mMetadataBuffersToSubmit(0), 531 mNumUndequeuedBuffers(0), 532 mRepeatFrameDelayUs(-1ll), 533 mMaxPtsGapUs(-1ll), 534 mMaxFps(-1), 535 mTimePerFrameUs(-1ll), 536 mTimePerCaptureUs(-1ll), 537 mCreateInputBuffersSuspended(false), 538 mTunneled(false), 539 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0), 540 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) { 541 mUninitializedState = new UninitializedState(this); 542 mLoadedState = new LoadedState(this); 543 mLoadedToIdleState = new LoadedToIdleState(this); 544 mIdleToExecutingState = new IdleToExecutingState(this); 545 mExecutingState = new ExecutingState(this); 546 547 mOutputPortSettingsChangedState = 548 new OutputPortSettingsChangedState(this); 549 550 mExecutingToIdleState = new ExecutingToIdleState(this); 551 
mIdleToLoadedState = new IdleToLoadedState(this); 552 mFlushingState = new FlushingState(this); 553 554 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 555 mInputEOSResult = OK; 556 557 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 558 559 changeState(mUninitializedState); 560} 561 562ACodec::~ACodec() { 563} 564 565void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 566 mNotify = msg; 567} 568 569void ACodec::initiateSetup(const sp<AMessage> &msg) { 570 msg->setWhat(kWhatSetup); 571 msg->setTarget(this); 572 msg->post(); 573} 574 575void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 576 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 577 msg->setMessage("params", params); 578 msg->post(); 579} 580 581void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 582 msg->setWhat(kWhatAllocateComponent); 583 msg->setTarget(this); 584 msg->post(); 585} 586 587void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 588 msg->setWhat(kWhatConfigureComponent); 589 msg->setTarget(this); 590 msg->post(); 591} 592 593status_t ACodec::setSurface(const sp<Surface> &surface) { 594 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 595 msg->setObject("surface", surface); 596 597 sp<AMessage> response; 598 status_t err = msg->postAndAwaitResponse(&response); 599 600 if (err == OK) { 601 (void)response->findInt32("err", &err); 602 } 603 return err; 604} 605 606void ACodec::initiateCreateInputSurface() { 607 (new AMessage(kWhatCreateInputSurface, this))->post(); 608} 609 610void ACodec::initiateSetInputSurface( 611 const sp<PersistentSurface> &surface) { 612 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 613 msg->setObject("input-surface", surface); 614 msg->post(); 615} 616 617void ACodec::signalEndOfInputStream() { 618 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 619} 620 621void ACodec::initiateStart() { 622 (new AMessage(kWhatStart, this))->post(); 623} 624 625void 
ACodec::signalFlush() { 626 ALOGV("[%s] signalFlush", mComponentName.c_str()); 627 (new AMessage(kWhatFlush, this))->post(); 628} 629 630void ACodec::signalResume() { 631 (new AMessage(kWhatResume, this))->post(); 632} 633 634void ACodec::initiateShutdown(bool keepComponentAllocated) { 635 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 636 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 637 msg->post(); 638 if (!keepComponentAllocated) { 639 // ensure shutdown completes in 3 seconds 640 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 641 } 642} 643 644void ACodec::signalRequestIDRFrame() { 645 (new AMessage(kWhatRequestIDRFrame, this))->post(); 646} 647 648// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 649// Some codecs may return input buffers before having them processed. 650// This causes a halt if we already signaled an EOS on the input 651// port. For now keep submitting an output buffer if there was an 652// EOS on the input port, but not yet on the output port. 
653void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 654 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 655 mMetadataBuffersToSubmit > 0) { 656 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 657 } 658} 659 660status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 661 // allow keeping unset surface 662 if (surface == NULL) { 663 if (mNativeWindow != NULL) { 664 ALOGW("cannot unset a surface"); 665 return INVALID_OPERATION; 666 } 667 return OK; 668 } 669 670 // cannot switch from bytebuffers to surface 671 if (mNativeWindow == NULL) { 672 ALOGW("component was not configured with a surface"); 673 return INVALID_OPERATION; 674 } 675 676 ANativeWindow *nativeWindow = surface.get(); 677 // if we have not yet started the codec, we can simply set the native window 678 if (mBuffers[kPortIndexInput].size() == 0) { 679 mNativeWindow = surface; 680 return OK; 681 } 682 683 // we do not support changing a tunneled surface after start 684 if (mTunneled) { 685 ALOGW("cannot change tunneled surface"); 686 return INVALID_OPERATION; 687 } 688 689 int usageBits = 0; 690 // no need to reconnect as we will not dequeue all buffers 691 status_t err = setupNativeWindowSizeFormatAndUsage( 692 nativeWindow, &usageBits, 693 !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */); 694 if (err != OK) { 695 return err; 696 } 697 698 int ignoredFlags = kVideoGrallocUsage; 699 // New output surface is not allowed to add new usage flag except ignored ones. 700 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 701 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 702 return BAD_VALUE; 703 } 704 705 // get min undequeued count. We cannot switch to a surface that has a higher 706 // undequeued count than we allocated. 
707 int minUndequeuedBuffers = 0; 708 err = nativeWindow->query( 709 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 710 &minUndequeuedBuffers); 711 if (err != 0) { 712 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 713 strerror(-err), -err); 714 return err; 715 } 716 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 717 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 718 minUndequeuedBuffers, mNumUndequeuedBuffers); 719 return BAD_VALUE; 720 } 721 722 // we cannot change the number of output buffers while OMX is running 723 // set up surface to the same count 724 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 725 ALOGV("setting up surface for %zu buffers", buffers.size()); 726 727 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 728 if (err != 0) { 729 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 730 -err); 731 return err; 732 } 733 734 // need to enable allocation when attaching 735 surface->getIGraphicBufferProducer()->allowAllocation(true); 736 737 // for meta data mode, we move dequeud buffers to the new surface. 
738 // for non-meta mode, we must move all registered buffers 739 for (size_t i = 0; i < buffers.size(); ++i) { 740 const BufferInfo &info = buffers[i]; 741 // skip undequeued buffers for meta data mode 742 if (storingMetadataInDecodedBuffers() 743 && !mLegacyAdaptiveExperiment 744 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 745 ALOGV("skipping buffer"); 746 continue; 747 } 748 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 749 750 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 751 if (err != OK) { 752 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 753 info.mGraphicBuffer->getNativeBuffer(), 754 strerror(-err), -err); 755 return err; 756 } 757 } 758 759 // cancel undequeued buffers to new surface 760 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 761 for (size_t i = 0; i < buffers.size(); ++i) { 762 BufferInfo &info = buffers.editItemAt(i); 763 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 764 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 765 err = nativeWindow->cancelBuffer( 766 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 767 info.mFenceFd = -1; 768 if (err != OK) { 769 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 770 info.mGraphicBuffer->getNativeBuffer(), 771 strerror(-err), -err); 772 return err; 773 } 774 } 775 } 776 // disallow further allocation 777 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 778 } 779 780 // push blank buffers to previous window if requested 781 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 782 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 783 } 784 785 mNativeWindow = nativeWindow; 786 mNativeWindowUsageBits = usageBits; 787 return OK; 788} 789 790status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 791 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 792 793 CHECK(mDealer[portIndex] == 
NULL); 794 CHECK(mBuffers[portIndex].isEmpty()); 795 796 status_t err; 797 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { 798 if (storingMetadataInDecodedBuffers()) { 799 err = allocateOutputMetadataBuffers(); 800 } else { 801 err = allocateOutputBuffersFromNativeWindow(); 802 } 803 } else { 804 OMX_PARAM_PORTDEFINITIONTYPE def; 805 InitOMXParams(&def); 806 def.nPortIndex = portIndex; 807 808 err = mOMXNode->getParameter( 809 OMX_IndexParamPortDefinition, &def, sizeof(def)); 810 811 if (err == OK) { 812 MetadataBufferType type = 813 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 814 size_t bufSize = def.nBufferSize; 815 if (type == kMetadataBufferTypeANWBuffer) { 816 bufSize = sizeof(VideoNativeMetadata); 817 } else if (type == kMetadataBufferTypeNativeHandleSource) { 818 bufSize = sizeof(VideoNativeHandleMetadata); 819 } 820 821 // If using gralloc or native source input metadata buffers, allocate largest 822 // metadata size as we prefer to generate native source metadata, but component 823 // may require gralloc source. For camera source, allocate at least enough 824 // size for native metadata buffers. 
825 size_t allottedSize = bufSize; 826 if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) { 827 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 828 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 829 bufSize = max(bufSize, sizeof(VideoNativeMetadata)); 830 } 831 832 size_t conversionBufferSize = 0; 833 834 sp<DataConverter> converter = mConverter[portIndex]; 835 if (converter != NULL) { 836 // here we assume sane conversions of max 4:1, so result fits in int32 837 if (portIndex == kPortIndexInput) { 838 conversionBufferSize = converter->sourceSize(bufSize); 839 } else { 840 conversionBufferSize = converter->targetSize(bufSize); 841 } 842 } 843 844 size_t alignment = MemoryDealer::getAllocationAlignment(); 845 846 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port", 847 mComponentName.c_str(), 848 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 849 portIndex == kPortIndexInput ? "input" : "output"); 850 851 // verify buffer sizes to avoid overflow in align() 852 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { 853 ALOGE("b/22885421"); 854 return NO_MEMORY; 855 } 856 857 // don't modify bufSize as OMX may not expect it to increase after negotiation 858 size_t alignedSize = align(bufSize, alignment); 859 size_t alignedConvSize = align(conversionBufferSize, alignment); 860 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { 861 ALOGE("b/22885421"); 862 return NO_MEMORY; 863 } 864 865 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); 866 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 867 868 const sp<AMessage> &format = portIndex == kPortIndexInput ? 
mInputFormat : mOutputFormat; 869 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 870 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 871 if (mem == NULL || mem->pointer() == NULL) { 872 return NO_MEMORY; 873 } 874 875 BufferInfo info; 876 info.mStatus = BufferInfo::OWNED_BY_US; 877 info.mFenceFd = -1; 878 info.mRenderInfo = NULL; 879 info.mNativeHandle = NULL; 880 881 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 882 mem.clear(); 883 884 void *ptr = NULL; 885 sp<NativeHandle> native_handle; 886 err = mOMXNode->allocateSecureBuffer( 887 portIndex, bufSize, &info.mBufferID, 888 &ptr, &native_handle); 889 890 // TRICKY: this representation is unorthodox, but ACodec requires 891 // an ABuffer with a proper size to validate range offsets and lengths. 892 // Since mData is never referenced for secure input, it is used to store 893 // either the pointer to the secure buffer, or the opaque handle as on 894 // some devices ptr is actually an opaque handle, not a pointer. 895 896 // TRICKY2: use native handle as the base of the ABuffer if received one, 897 // because Widevine source only receives these base addresses. 898 const native_handle_t *native_handle_ptr = 899 native_handle == NULL ? NULL : native_handle->handle(); 900 info.mData = new MediaCodecBuffer(format, 901 new ABuffer(ptr != NULL ? 
ptr : (void *)native_handle_ptr, bufSize)); 902 info.mNativeHandle = native_handle; 903 info.mCodecData = info.mData; 904 } else { 905 err = mOMXNode->useBuffer( 906 portIndex, mem, &info.mBufferID, allottedSize); 907 } 908 909 if (mem != NULL) { 910 info.mCodecData = new SharedMemoryBuffer(format, mem); 911 info.mCodecRef = mem; 912 913 if (type == kMetadataBufferTypeANWBuffer) { 914 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 915 } 916 917 // if we require conversion, allocate conversion buffer for client use; 918 // otherwise, reuse codec buffer 919 if (mConverter[portIndex] != NULL) { 920 CHECK_GT(conversionBufferSize, (size_t)0); 921 mem = mDealer[portIndex]->allocate(conversionBufferSize); 922 if (mem == NULL|| mem->pointer() == NULL) { 923 return NO_MEMORY; 924 } 925 info.mData = new SharedMemoryBuffer(format, mem); 926 info.mMemRef = mem; 927 } else { 928 info.mData = info.mCodecData; 929 info.mMemRef = info.mCodecRef; 930 } 931 } 932 933 mBuffers[portIndex].push(info); 934 } 935 } 936 } 937 938 if (err != OK) { 939 return err; 940 } 941 942 sp<AMessage> notify = mNotify->dup(); 943 notify->setInt32("what", CodecBase::kWhatBuffersAllocated); 944 945 notify->setInt32("portIndex", portIndex); 946 947 sp<PortDescription> desc = new PortDescription; 948 949 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 950 const BufferInfo &info = mBuffers[portIndex][i]; 951 desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef); 952 } 953 954 notify->setObject("portDesc", desc); 955 notify->post(); 956 957 return OK; 958} 959 960status_t ACodec::setupNativeWindowSizeFormatAndUsage( 961 ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */, 962 bool reconnect) { 963 OMX_PARAM_PORTDEFINITIONTYPE def; 964 InitOMXParams(&def); 965 def.nPortIndex = kPortIndexOutput; 966 967 status_t err = mOMXNode->getParameter( 968 OMX_IndexParamPortDefinition, &def, sizeof(def)); 969 970 if (err != OK) { 971 return err; 972 } 973 
974 OMX_U32 usage = 0; 975 err = mOMXNode->getGraphicBufferUsage(kPortIndexOutput, &usage); 976 if (err != 0) { 977 ALOGW("querying usage flags from OMX IL component failed: %d", err); 978 // XXX: Currently this error is logged, but not fatal. 979 usage = 0; 980 } 981 int omxUsage = usage; 982 983 if (mFlags & kFlagIsGrallocUsageProtected) { 984 usage |= GRALLOC_USAGE_PROTECTED; 985 } 986 987 usage |= kVideoGrallocUsage; 988 *finalUsage = usage; 989 990 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 991 mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN; 992 993 ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage); 994 return setNativeWindowSizeFormatAndUsage( 995 nativeWindow, 996 def.format.video.nFrameWidth, 997 def.format.video.nFrameHeight, 998 def.format.video.eColorFormat, 999 mRotationDegrees, 1000 usage, 1001 reconnect); 1002} 1003 1004status_t ACodec::configureOutputBuffersFromNativeWindow( 1005 OMX_U32 *bufferCount, OMX_U32 *bufferSize, 1006 OMX_U32 *minUndequeuedBuffers, bool preregister) { 1007 1008 OMX_PARAM_PORTDEFINITIONTYPE def; 1009 InitOMXParams(&def); 1010 def.nPortIndex = kPortIndexOutput; 1011 1012 status_t err = mOMXNode->getParameter( 1013 OMX_IndexParamPortDefinition, &def, sizeof(def)); 1014 1015 if (err == OK) { 1016 err = setupNativeWindowSizeFormatAndUsage( 1017 mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */); 1018 } 1019 if (err != OK) { 1020 mNativeWindowUsageBits = 0; 1021 return err; 1022 } 1023 1024 // Exits here for tunneled video playback codecs -- i.e. skips native window 1025 // buffer allocation step as this is managed by the tunneled OMX omponent 1026 // itself and explicitly sets def.nBufferCountActual to 0. 
// Negotiates output buffer count and size between the OMX component and the
// native window without allocating any buffers yet.
//
// On success fills in:
//   *bufferCount          - nBufferCountActual accepted by the component
//   *bufferSize           - nBufferSize reported by the component
//   *minUndequeuedBuffers - buffers the window keeps, adjusted upward by the
//                           extra buffers we managed to add
// |preregister| is forwarded as the "reconnect" flag to
// setupNativeWindowSizeFormatAndUsage().
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        // Leave no stale usage bits behind on failure.
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMXNode->setParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // Each iteration retries with one fewer extra buffer; only when even
    // extraBuffers == 0 fails do we give up and propagate the error.
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMXNode->setParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            // The extras we secured effectively raise the undequeued floor.
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}
// Allocates all output buffers by dequeuing them from the native window and
// registering each GraphicBuffer with the OMX component. On success the
// required minimum of undequeued buffers is cancelled back to the window;
// on failure every buffer we still own is cancelled back.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    // In non-metadata mode the producer must be allowed to allocate while we
    // pre-dequeue; allocation is turned back off at the end of this function.
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = graphicBuffer;
        // Push before registering so the cleanup loop below can cancel this
        // buffer even if useGraphicBuffer() fails.
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMXNode->useGraphicBuffer(
                kPortIndexOutput, graphicBuffer, &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // Preserve the first error; later cancel failures don't mask it.
            if (err == 0) {
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}
// Allocates small shared-memory metadata buffers (VideoNativeMetadata or
// VideoGrallocMetadata) for the output port instead of full graphic buffers.
// Under the legacy adaptive experiment, graphic buffers are additionally
// preallocated/preregistered by dequeuing and then cancelling them all.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers,
            mLegacyAdaptiveExperiment /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    // Metadata payload size depends on the negotiated metadata type.
    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // Initialize to "no fence" so stale fds are never closed/waited on.
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new SharedMemoryBuffer(mOutputFormat, mem);
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        // NOTE(review): err from useBuffer() is not checked here; a mid-loop
        // failure only surfaces via the final return value while remaining
        // iterations still run -- confirm this is intended.
        err = mOMXNode->useBuffer(
                kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMXNode->updateGraphicBufferInMeta(
                    kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        // Hand every buffer we own back to the window; keep the first error.
        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                if (err == OK) {
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}
-err); 1244 break; 1245 } 1246 1247 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1248 mOMXNode->updateGraphicBufferInMeta( 1249 kPortIndexOutput, graphicBuffer, info->mBufferID); 1250 info->mStatus = BufferInfo::OWNED_BY_US; 1251 info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy"); 1252 info->mGraphicBuffer = graphicBuffer; 1253 } 1254 1255 for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) { 1256 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1257 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1258 status_t error = cancelBufferToNativeWindow(info); 1259 if (err == OK) { 1260 err = error; 1261 } 1262 } 1263 } 1264 1265 static_cast<Surface*>(mNativeWindow.get()) 1266 ->getIGraphicBufferProducer()->allowAllocation(false); 1267 } 1268 1269 mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers; 1270 return err; 1271} 1272 1273status_t ACodec::submitOutputMetadataBuffer() { 1274 CHECK(storingMetadataInDecodedBuffers()); 1275 if (mMetadataBuffersToSubmit == 0) 1276 return OK; 1277 1278 BufferInfo *info = dequeueBufferFromNativeWindow(); 1279 if (info == NULL) { 1280 return ERROR_IO; 1281 } 1282 1283 ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p", 1284 mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get()); 1285 1286 --mMetadataBuffersToSubmit; 1287 info->checkWriteFence("submitOutputMetadataBuffer"); 1288 status_t err = mOMXNode->fillBuffer(info->mBufferID, info->mFenceFd); 1289 info->mFenceFd = -1; 1290 if (err == OK) { 1291 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 1292 } 1293 1294 return err; 1295} 1296 1297status_t ACodec::waitForFence(int fd, const char *dbg ) { 1298 status_t res = OK; 1299 if (fd >= 0) { 1300 sp<Fence> fence = new Fence(fd); 1301 res = fence->wait(IOMX::kFenceTimeoutMs); 1302 ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg); 1303 } 1304 return res; 1305} 1306 1307// static 1308const char 
*ACodec::_asString(BufferInfo::Status s) { 1309 switch (s) { 1310 case BufferInfo::OWNED_BY_US: return "OUR"; 1311 case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT"; 1312 case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM"; 1313 case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM"; 1314 case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE"; 1315 case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED"; 1316 default: return "?"; 1317 } 1318} 1319 1320void ACodec::dumpBuffers(OMX_U32 portIndex) { 1321 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 1322 ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(), 1323 portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size()); 1324 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1325 const BufferInfo &info = mBuffers[portIndex][i]; 1326 ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u", 1327 i, info.mBufferID, info.mGraphicBuffer.get(), 1328 info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(), 1329 _asString(info.mStatus), info.mStatus, info.mDequeuedAt); 1330 } 1331} 1332 1333status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { 1334 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 1335 1336 ALOGV("[%s] Calling cancelBuffer on buffer %u", 1337 mComponentName.c_str(), info->mBufferID); 1338 1339 info->checkWriteFence("cancelBufferToNativeWindow"); 1340 int err = mNativeWindow->cancelBuffer( 1341 mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 1342 info->mFenceFd = -1; 1343 1344 ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window", 1345 mComponentName.c_str(), info->mBufferID); 1346 // change ownership even if cancelBuffer fails 1347 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1348 1349 return err; 1350} 1351 1352void ACodec::updateRenderInfoForDequeuedBuffer( 1353 ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) { 1354 1355 info->mRenderInfo = 1356 
// Registers a freshly dequeued window buffer with the render tracker and
// immediately flushes any frames whose fences have already signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                // index of |info| within mBuffers[kPortIndexOutput]
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Callback for a frame-rendered event; dumps the render queue if the tracker
// rejects the timestamp.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}

// Collects frames whose render fences have signaled (up to |until|, if given),
// unlinks their tracking info from our buffer table, and posts a
// kWhatOutputFramesRendered notification if there is anything to report.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}
// Dequeues one buffer from the native window and maps it back to our
// BufferInfo table. Known-but-not-window-owned and unregistered buffers are
// treated as stale and discarded (re-dequeued). In metadata mode a truly new
// buffer replaces the least-recently-dequeued entry instead. Returns NULL on
// error (tunneled mode, fatal error, or dequeue failure).
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
              " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        // Search our table (newest first) for a matching buffer handle.
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                    info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used. We can simply ignore this as we have already dequeued
                // this buffer properly. NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    // Point the metadata entry at the replacement graphic buffer.
    mOMXNode->updateGraphicBufferInMeta(
            kPortIndexOutput, oldest->mGraphicBuffer, oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->data());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->data());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->data());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->data());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}
%p)", 1501 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1502 mDequeueCounter - oldest->mDequeuedAt, 1503 (void *)(uintptr_t)nativeMeta->pBuffer, 1504 oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->data()); 1505 } 1506 1507 updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest); 1508 return oldest; 1509} 1510 1511status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) { 1512 status_t err = OK; 1513 for (size_t i = mBuffers[portIndex].size(); i > 0;) { 1514 i--; 1515 status_t err2 = freeBuffer(portIndex, i); 1516 if (err == OK) { 1517 err = err2; 1518 } 1519 } 1520 1521 // clear mDealer even on an error 1522 mDealer[portIndex].clear(); 1523 return err; 1524} 1525 1526status_t ACodec::freeOutputBuffersNotOwnedByComponent() { 1527 status_t err = OK; 1528 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1529 i--; 1530 BufferInfo *info = 1531 &mBuffers[kPortIndexOutput].editItemAt(i); 1532 1533 // At this time some buffers may still be with the component 1534 // or being drained. 1535 if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT && 1536 info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { 1537 status_t err2 = freeBuffer(kPortIndexOutput, i); 1538 if (err == OK) { 1539 err = err2; 1540 } 1541 } 1542 } 1543 1544 return err; 1545} 1546 1547status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) { 1548 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1549 status_t err = OK; 1550 1551 // there should not be any fences in the metadata 1552 MetadataBufferType type = 1553 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 1554 if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL 1555 && info->mData->size() >= sizeof(VideoNativeMetadata)) { 1556 int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd; 1557 if (fenceFd >= 0) { 1558 ALOGW("unreleased fence (%d) in %s metadata buffer %zu", 1559 fenceFd, portIndex == kPortIndexInput ? 
// Releases the buffer at slot |i| on |portIndex|: cancels it to the native
// window if we own it, frees it at the OMX component, closes any pending
// fence, untracks render info, and removes the table entry (even on error).
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                // Best effort; the OMX buffer is freed regardless.
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMXNode->freeBuffer(portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMXNode->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}

// Linear search of the port's buffer table by OMX buffer ID; optionally
// returns the slot index through |index|. NULL (with an error log) if absent.
ACodec::BufferInfo *ACodec::findBufferByID(
        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mBufferID == bufferID) {
            if (index != NULL) {
                *index = i;
            }
            return info;
        }
    }

    ALOGE("Could not find buffer with ID %u", bufferID);
    return NULL;
}

// Maps (isEncoder, mime) to the standard OMX component role and applies it to
// the component. BAD_VALUE if the mime has no known role.
status_t ACodec::setComponentRole(
        bool isEncoder, const char *mime) {
    const char *role = GetComponentRole(isEncoder, mime);
    if (role == NULL) {
        return BAD_VALUE;
    }
    status_t err = SetComponentRole(mOMXNode, role);
    if (err != OK) {
        ALOGW("[%s] Failed to set standard component role '%s'.",
                mComponentName.c_str(), role);
    }
    return err;
}
new AMessage; 1634 mConfigFormat = msg; 1635 1636 mIsEncoder = encoder; 1637 1638 mInputMetadataType = kMetadataBufferTypeInvalid; 1639 mOutputMetadataType = kMetadataBufferTypeInvalid; 1640 1641 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1642 1643 if (err != OK) { 1644 return err; 1645 } 1646 1647 int32_t bitRate = 0; 1648 // FLAC encoder doesn't need a bitrate, other encoders do 1649 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1650 && !msg->findInt32("bitrate", &bitRate)) { 1651 return INVALID_OPERATION; 1652 } 1653 1654 // propagate bitrate to the output so that the muxer has it 1655 if (encoder && msg->findInt32("bitrate", &bitRate)) { 1656 // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the 1657 // average bitrate. We've been setting both bitrate and max-bitrate to this same value. 1658 outputFormat->setInt32("bitrate", bitRate); 1659 outputFormat->setInt32("max-bitrate", bitRate); 1660 } 1661 1662 int32_t storeMeta; 1663 if (encoder 1664 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1665 && storeMeta != kMetadataBufferTypeInvalid) { 1666 mInputMetadataType = (MetadataBufferType)storeMeta; 1667 err = mOMXNode->storeMetaDataInBuffers( 1668 kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1669 if (err != OK) { 1670 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1671 mComponentName.c_str(), err); 1672 1673 return err; 1674 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1675 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1676 // IOMX translates ANWBuffers to gralloc source already. 
1677 mInputMetadataType = (MetadataBufferType)storeMeta; 1678 } 1679 1680 uint32_t usageBits; 1681 if (mOMXNode->getParameter( 1682 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1683 &usageBits, sizeof(usageBits)) == OK) { 1684 inputFormat->setInt32( 1685 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1686 } 1687 } 1688 1689 int32_t prependSPSPPS = 0; 1690 if (encoder 1691 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1692 && prependSPSPPS != 0) { 1693 OMX_INDEXTYPE index; 1694 err = mOMXNode->getExtensionIndex( 1695 "OMX.google.android.index.prependSPSPPSToIDRFrames", &index); 1696 1697 if (err == OK) { 1698 PrependSPSPPSToIDRFramesParams params; 1699 InitOMXParams(¶ms); 1700 params.bEnable = OMX_TRUE; 1701 1702 err = mOMXNode->setParameter(index, ¶ms, sizeof(params)); 1703 } 1704 1705 if (err != OK) { 1706 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1707 "IDR frames. (err %d)", err); 1708 1709 return err; 1710 } 1711 } 1712 1713 // Only enable metadata mode on encoder output if encoder can prepend 1714 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1715 // opaque handle, to which we don't have access. 
1716 int32_t video = !strncasecmp(mime, "video/", 6); 1717 mIsVideo = video; 1718 if (encoder && video) { 1719 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1720 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1721 && storeMeta != 0); 1722 1723 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1724 err = mOMXNode->storeMetaDataInBuffers(kPortIndexOutput, enable, &mOutputMetadataType); 1725 if (err != OK) { 1726 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1727 mComponentName.c_str(), err); 1728 } 1729 1730 if (!msg->findInt64( 1731 "repeat-previous-frame-after", 1732 &mRepeatFrameDelayUs)) { 1733 mRepeatFrameDelayUs = -1ll; 1734 } 1735 1736 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1737 mMaxPtsGapUs = -1ll; 1738 } 1739 1740 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1741 mMaxFps = -1; 1742 } 1743 1744 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1745 mTimePerCaptureUs = -1ll; 1746 } 1747 1748 if (!msg->findInt32( 1749 "create-input-buffers-suspended", 1750 (int32_t*)&mCreateInputBuffersSuspended)) { 1751 mCreateInputBuffersSuspended = false; 1752 } 1753 } 1754 1755 // NOTE: we only use native window for video decoders 1756 sp<RefBase> obj; 1757 bool haveNativeWindow = msg->findObject("native-window", &obj) 1758 && obj != NULL && video && !encoder; 1759 mUsingNativeWindow = haveNativeWindow; 1760 mLegacyAdaptiveExperiment = false; 1761 if (video && !encoder) { 1762 inputFormat->setInt32("adaptive-playback", false); 1763 1764 int32_t usageProtected; 1765 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1766 if (!haveNativeWindow) { 1767 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1768 return PERMISSION_DENIED; 1769 } 1770 mFlags |= kFlagIsGrallocUsageProtected; 1771 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1772 } 1773 1774 if (mFlags & kFlagIsSecure) { 1775 // use native_handles for secure input buffers 1776 
err = mOMXNode->enableNativeBuffers( 1777 kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1778 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1779 err = OK; // ignore error for now 1780 } 1781 } 1782 if (haveNativeWindow) { 1783 sp<ANativeWindow> nativeWindow = 1784 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1785 1786 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1787 int32_t autoFrc; 1788 if (msg->findInt32("auto-frc", &autoFrc)) { 1789 bool enabled = autoFrc; 1790 OMX_CONFIG_BOOLEANTYPE config; 1791 InitOMXParams(&config); 1792 config.bEnabled = (OMX_BOOL)enabled; 1793 status_t temp = mOMXNode->setConfig( 1794 (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1795 &config, sizeof(config)); 1796 if (temp == OK) { 1797 outputFormat->setInt32("auto-frc", enabled); 1798 } else if (enabled) { 1799 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1800 } 1801 } 1802 // END of temporary support for automatic FRC 1803 1804 int32_t tunneled; 1805 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1806 tunneled != 0) { 1807 ALOGI("Configuring TUNNELED video playback."); 1808 mTunneled = true; 1809 1810 int32_t audioHwSync = 0; 1811 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1812 ALOGW("No Audio HW Sync provided for video tunnel"); 1813 } 1814 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1815 if (err != OK) { 1816 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1817 audioHwSync, nativeWindow.get()); 1818 return err; 1819 } 1820 1821 int32_t maxWidth = 0, maxHeight = 0; 1822 if (msg->findInt32("max-width", &maxWidth) && 1823 msg->findInt32("max-height", &maxHeight)) { 1824 1825 err = mOMXNode->prepareForAdaptivePlayback( 1826 kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1827 if (err != OK) { 1828 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1829 mComponentName.c_str(), err); 1830 // allow failure 1831 err = OK; 1832 } else { 
1833 inputFormat->setInt32("max-width", maxWidth); 1834 inputFormat->setInt32("max-height", maxHeight); 1835 inputFormat->setInt32("adaptive-playback", true); 1836 } 1837 } 1838 } else { 1839 ALOGV("Configuring CPU controlled video playback."); 1840 mTunneled = false; 1841 1842 // Explicity reset the sideband handle of the window for 1843 // non-tunneled video in case the window was previously used 1844 // for a tunneled video playback. 1845 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1846 if (err != OK) { 1847 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1848 return err; 1849 } 1850 1851 // Always try to enable dynamic output buffers on native surface 1852 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1853 err = mOMXNode->storeMetaDataInBuffers( 1854 kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1855 if (err != OK) { 1856 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1857 mComponentName.c_str(), err); 1858 1859 // if adaptive playback has been requested, try JB fallback 1860 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1861 // LARGE MEMORY REQUIREMENT 1862 1863 // we will not do adaptive playback on software accessed 1864 // surfaces as they never had to respond to changes in the 1865 // crop window, and we don't trust that they will be able to. 
1866 int usageBits = 0; 1867 bool canDoAdaptivePlayback; 1868 1869 if (nativeWindow->query( 1870 nativeWindow.get(), 1871 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1872 &usageBits) != OK) { 1873 canDoAdaptivePlayback = false; 1874 } else { 1875 canDoAdaptivePlayback = 1876 (usageBits & 1877 (GRALLOC_USAGE_SW_READ_MASK | 1878 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1879 } 1880 1881 int32_t maxWidth = 0, maxHeight = 0; 1882 if (canDoAdaptivePlayback && 1883 msg->findInt32("max-width", &maxWidth) && 1884 msg->findInt32("max-height", &maxHeight)) { 1885 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1886 mComponentName.c_str(), maxWidth, maxHeight); 1887 1888 err = mOMXNode->prepareForAdaptivePlayback( 1889 kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1890 ALOGW_IF(err != OK, 1891 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1892 mComponentName.c_str(), err); 1893 1894 if (err == OK) { 1895 inputFormat->setInt32("max-width", maxWidth); 1896 inputFormat->setInt32("max-height", maxHeight); 1897 inputFormat->setInt32("adaptive-playback", true); 1898 } 1899 } 1900 // allow failure 1901 err = OK; 1902 } else { 1903 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1904 mComponentName.c_str()); 1905 CHECK(storingMetadataInDecodedBuffers()); 1906 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1907 "legacy-adaptive", !msg->contains("no-experiments")); 1908 1909 inputFormat->setInt32("adaptive-playback", true); 1910 } 1911 1912 int32_t push; 1913 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1914 && push != 0) { 1915 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1916 } 1917 } 1918 1919 int32_t rotationDegrees; 1920 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1921 mRotationDegrees = rotationDegrees; 1922 } else { 1923 mRotationDegrees = 0; 1924 } 1925 } 1926 1927 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 1928 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 1929 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 1930 1931 if (video) { 1932 // determine need for software renderer 1933 bool usingSwRenderer = false; 1934 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1935 usingSwRenderer = true; 1936 haveNativeWindow = false; 1937 } 1938 1939 if (encoder) { 1940 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 1941 } else { 1942 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 1943 } 1944 1945 if (err != OK) { 1946 return err; 1947 } 1948 1949 if (haveNativeWindow) { 1950 mNativeWindow = static_cast<Surface *>(obj.get()); 1951 } 1952 1953 // initialize native window now to get actual output format 1954 // TODO: this is needed for some encoders even though they don't use native window 1955 err = initNativeWindow(); 1956 if (err != OK) { 1957 return err; 1958 } 1959 1960 // fallback for devices that do not handle flex-YUV for native buffers 1961 if (haveNativeWindow) { 1962 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 1963 if (msg->findInt32("color-format", &requestedColorFormat) && 1964 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 1965 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 1966 if (err != OK) { 1967 return err; 1968 } 1969 int32_t colorFormat = OMX_COLOR_FormatUnused; 1970 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 1971 if (!outputFormat->findInt32("color-format", &colorFormat)) { 1972 ALOGE("ouptut port did not have a color format (wrong domain?)"); 1973 return BAD_VALUE; 1974 } 1975 ALOGD("[%s] Requested output format %#x and got %#x.", 1976 mComponentName.c_str(), requestedColorFormat, colorFormat); 1977 if (!IsFlexibleColorFormat( 1978 mOMXNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 1979 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 1980 // device did not handle flex-YUV request for native window, fall back 1981 // to SW renderer 1982 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 1983 mNativeWindow.clear(); 1984 mNativeWindowUsageBits = 0; 1985 haveNativeWindow = false; 1986 usingSwRenderer = true; 1987 if (storingMetadataInDecodedBuffers()) { 1988 err = mOMXNode->storeMetaDataInBuffers( 1989 kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 1990 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 1991 // TODO: implement adaptive-playback support for bytebuffer mode. 1992 // This is done by SW codecs, but most HW codecs don't support it. 1993 inputFormat->setInt32("adaptive-playback", false); 1994 } 1995 if (err == OK) { 1996 err = mOMXNode->enableNativeBuffers( 1997 kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 1998 } 1999 if (mFlags & kFlagIsGrallocUsageProtected) { 2000 // fallback is not supported for protected playback 2001 err = PERMISSION_DENIED; 2002 } else if (err == OK) { 2003 err = setupVideoDecoder( 2004 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2005 } 2006 } 2007 } 2008 } 2009 2010 if (usingSwRenderer) { 2011 outputFormat->setInt32("using-sw-renderer", 1); 2012 } 2013 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2014 int32_t numChannels, sampleRate; 2015 if (!msg->findInt32("channel-count", &numChannels) 2016 || !msg->findInt32("sample-rate", &sampleRate)) { 2017 // Since we did not always check for these, leave them optional 2018 // and have the decoder figure it all out. 2019 err = OK; 2020 } else { 2021 err = setupRawAudioFormat( 2022 encoder ? 
kPortIndexInput : kPortIndexOutput, 2023 sampleRate, 2024 numChannels); 2025 } 2026 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2027 int32_t numChannels, sampleRate; 2028 if (!msg->findInt32("channel-count", &numChannels) 2029 || !msg->findInt32("sample-rate", &sampleRate)) { 2030 err = INVALID_OPERATION; 2031 } else { 2032 int32_t isADTS, aacProfile; 2033 int32_t sbrMode; 2034 int32_t maxOutputChannelCount; 2035 int32_t pcmLimiterEnable; 2036 drcParams_t drc; 2037 if (!msg->findInt32("is-adts", &isADTS)) { 2038 isADTS = 0; 2039 } 2040 if (!msg->findInt32("aac-profile", &aacProfile)) { 2041 aacProfile = OMX_AUDIO_AACObjectNull; 2042 } 2043 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2044 sbrMode = -1; 2045 } 2046 2047 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2048 maxOutputChannelCount = -1; 2049 } 2050 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2051 // value is unknown 2052 pcmLimiterEnable = -1; 2053 } 2054 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2055 // value is unknown 2056 drc.encodedTargetLevel = -1; 2057 } 2058 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2059 // value is unknown 2060 drc.drcCut = -1; 2061 } 2062 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2063 // value is unknown 2064 drc.drcBoost = -1; 2065 } 2066 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2067 // value is unknown 2068 drc.heavyCompression = -1; 2069 } 2070 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2071 // value is unknown 2072 drc.targetRefLevel = -1; 2073 } 2074 2075 err = setupAACCodec( 2076 encoder, numChannels, sampleRate, bitRate, aacProfile, 2077 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2078 pcmLimiterEnable); 2079 } 2080 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2081 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2082 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2083 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2084 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2085 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2086 // These are PCM-like formats with a fixed sample rate but 2087 // a variable number of channels. 2088 2089 int32_t numChannels; 2090 if (!msg->findInt32("channel-count", &numChannels)) { 2091 err = INVALID_OPERATION; 2092 } else { 2093 int32_t sampleRate; 2094 if (!msg->findInt32("sample-rate", &sampleRate)) { 2095 sampleRate = 8000; 2096 } 2097 err = setupG711Codec(encoder, sampleRate, numChannels); 2098 } 2099 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2100 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2101 if (encoder && 2102 (!msg->findInt32("channel-count", &numChannels) 2103 || !msg->findInt32("sample-rate", &sampleRate))) { 2104 ALOGE("missing channel count or sample rate for FLAC encoder"); 2105 err = INVALID_OPERATION; 2106 } else { 2107 if (encoder) { 2108 if (!msg->findInt32( 2109 "complexity", &compressionLevel) && 2110 !msg->findInt32( 2111 "flac-compression-level", &compressionLevel)) { 2112 compressionLevel = 5; // default FLAC compression level 2113 } else if (compressionLevel < 0) { 2114 ALOGW("compression level %d outside [0..8] range, " 2115 "using 0", 2116 compressionLevel); 2117 compressionLevel = 0; 2118 } else if (compressionLevel > 8) { 2119 ALOGW("compression level %d outside [0..8] range, " 2120 "using 8", 2121 compressionLevel); 2122 compressionLevel = 8; 2123 } 2124 } 2125 err = setupFlacCodec( 2126 encoder, numChannels, sampleRate, compressionLevel); 2127 } 2128 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2129 int32_t numChannels, sampleRate; 2130 if (encoder 2131 || !msg->findInt32("channel-count", &numChannels) 2132 || !msg->findInt32("sample-rate", &sampleRate)) { 2133 err = INVALID_OPERATION; 2134 } else { 2135 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2136 } 2137 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2138 int32_t numChannels; 2139 int32_t sampleRate; 2140 if (!msg->findInt32("channel-count", &numChannels) 2141 || !msg->findInt32("sample-rate", &sampleRate)) { 2142 err = INVALID_OPERATION; 2143 } else { 2144 err = setupAC3Codec(encoder, numChannels, sampleRate); 2145 } 2146 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2147 int32_t numChannels; 2148 int32_t sampleRate; 2149 if (!msg->findInt32("channel-count", &numChannels) 2150 || !msg->findInt32("sample-rate", &sampleRate)) { 2151 err = INVALID_OPERATION; 2152 } else { 2153 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2154 } 2155 } 2156 2157 if (err != OK) { 2158 return err; 2159 } 2160 2161 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2162 mEncoderDelay = 0; 2163 } 2164 2165 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2166 mEncoderPadding = 0; 2167 } 2168 2169 if (msg->findInt32("channel-mask", &mChannelMask)) { 2170 mChannelMaskPresent = true; 2171 } else { 2172 mChannelMaskPresent = false; 2173 } 2174 2175 int32_t maxInputSize; 2176 if (msg->findInt32("max-input-size", &maxInputSize)) { 2177 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2178 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2179 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2180 } 2181 2182 int32_t priority; 2183 if (msg->findInt32("priority", &priority)) { 2184 err = setPriority(priority); 2185 } 2186 2187 int32_t rateInt = -1; 2188 float rateFloat = -1; 2189 if (!msg->findFloat("operating-rate", &rateFloat)) { 2190 msg->findInt32("operating-rate", &rateInt); 2191 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
    }
    if (rateFloat > 0) {
        err = setOperatingRate(rateFloat, video);
    }

    // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
    mBaseOutputFormat = outputFormat;
    // trigger a kWhatOutputFormatChanged msg on first buffer
    mLastOutputFormat.clear();

    // Read back the actual input/output formats the component settled on;
    // only publish them to mInputFormat/mOutputFormat if both reads succeed.
    err = getPortFormat(kPortIndexInput, inputFormat);
    if (err == OK) {
        err = getPortFormat(kPortIndexOutput, outputFormat);
        if (err == OK) {
            mInputFormat = inputFormat;
            mOutputFormat = outputFormat;
        }
    }

    // create data converters if needed
    // (bridges the app-requested PCM encoding and whatever the codec produced)
    if (!video && err == OK) {
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        if (encoder) {
            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
            if (mConverter[kPortIndexInput] != NULL) {
                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        } else {
            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
            if (mConverter[kPortIndexOutput] != NULL) {
                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        }
    }

    return err;
}

// Forwards the requested codec priority to the component via the
// OMX_IndexConfigPriority config. Best-effort: an unsupported config is only
// logged, and OK is returned; only a negative priority is rejected.
status_t ACodec::setPriority(int32_t priority) {
    if (priority < 0) {
        return BAD_VALUE;
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = (OMX_U32)priority;
    status_t temp = mOMXNode->setConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigPriority,
            &config, sizeof(config));
    if (temp != OK) {
        ALOGI("codec does not support config priority (err %d)", temp);
    }
    return OK;
}

// Converts the requested operating rate to the OMX wire format (Q16
// fixed-point fps for video, integer Hz for audio) and sets it via
// OMX_IndexConfigOperatingRate. Best-effort: a component that rejects the
// config is only logged; OK is returned. Out-of-range rates return BAD_VALUE.
status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
    if (rateFloat < 0) {
        return BAD_VALUE;
    }
    OMX_U32 rate;
    if (isVideo) {
        // Video rates are expressed in Q16: 65535 fps is the largest value
        // that still fits in an OMX_U32 after the * 65536 scaling.
        if (rateFloat > 65535) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
    } else {
        if (rateFloat > UINT_MAX) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat);
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = rate;
    status_t err = mOMXNode->setConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
            &config, sizeof(config));
    if (err != OK) {
        ALOGI("codec does not support config operating rate (err %d)", err);
    }
    return OK;
}

// Queries the encoder's intra-refresh period (in frames). Tries the Android
// vendor config index first; on failure falls back to the standard OMX cyclic
// intra-refresh parameter and derives the period from the frame's macroblock
// count (see continuation below).
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMXNode->getConfig(
            (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
2288 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2289 InitOMXParams(&refreshParams); 2290 refreshParams.nPortIndex = kPortIndexOutput; 2291 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2292 err = mOMXNode->getParameter( 2293 OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams)); 2294 if (err != OK || refreshParams.nCirMBs == 0) { 2295 *intraRefreshPeriod = 0; 2296 return OK; 2297 } 2298 2299 // Calculate period based on width and height 2300 uint32_t width, height; 2301 OMX_PARAM_PORTDEFINITIONTYPE def; 2302 InitOMXParams(&def); 2303 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2304 def.nPortIndex = kPortIndexOutput; 2305 err = mOMXNode->getParameter( 2306 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2307 if (err != OK) { 2308 *intraRefreshPeriod = 0; 2309 return err; 2310 } 2311 width = video_def->nFrameWidth; 2312 height = video_def->nFrameHeight; 2313 // Use H.264/AVC MacroBlock size 16x16 2314 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs); 2315 2316 return OK; 2317} 2318 2319status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) { 2320 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2321 InitOMXParams(¶ms); 2322 params.nPortIndex = kPortIndexOutput; 2323 params.nRefreshPeriod = intraRefreshPeriod; 2324 status_t err = mOMXNode->setConfig( 2325 (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2326 if (err == OK) { 2327 return OK; 2328 } 2329 2330 // Only in configure state, a component could invoke setParameter. 
    if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded based on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMXNode->getParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        // MBs refreshed per frame = total MBs per frame / requested period
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMXNode->setParameter(
            OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}

// Parses the "ts-schema" format key ("webrtc.vp8.N-layer" or
// "android.generic.N[+M]") and configures temporal layering on the encoder's
// output port. During configure() this uses setParameter; afterwards, the
// runtime setConfig index. On success the effective schema is written back
// into outputFormat. Encoder-only; returns OK if no schema was requested.
status_t ACodec::configureTemporalLayers(
        const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) {
    if (!mIsVideo || !mIsEncoder) {
        return INVALID_OPERATION;
    }

    AString tsSchema;
    if (!msg->findString("ts-schema", &tsSchema)) {
        return OK;
    }

    unsigned int numLayers = 0;
    unsigned int numBLayers = 0;
    int tags;
    char dummy;
    OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern =
        OMX_VIDEO_AndroidTemporalLayeringPatternNone;
    if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
            && numLayers > 0) {
        pattern = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
    } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
                    &numLayers, &dummy, &numBLayers, &dummy))
            // accept "N" alone (tags == 1) or "N+M" (tags == 3, separator '+');
            // guard the numLayers += numBLayers sum against overflow
            && (tags == 1 || (tags == 3 && dummy == '+'))
            && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
        numLayers += numBLayers;
        pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
    } else {
        ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
        return BAD_VALUE;
    }

    OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams;
    InitOMXParams(&layerParams);
    layerParams.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
            &layerParams, sizeof(layerParams));

    if (err != OK) {
        return err;
    } else if (!(layerParams.eSupportedPatterns & pattern)) {
        return BAD_VALUE;
    }

    // Clamp the request to what the component advertises it can do.
    numLayers = min(numLayers, layerParams.nLayerCountMax);
    numBLayers = min(numBLayers, layerParams.nBLayerCountMax);

    if (!inConfigure) {
        OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig;
        InitOMXParams(&layerConfig);
        layerConfig.nPortIndex = kPortIndexOutput;
        layerConfig.ePattern = pattern;
        layerConfig.nPLayerCountActual = numLayers - numBLayers;
        layerConfig.nBLayerCountActual = numBLayers;
        layerConfig.bBitrateRatiosSpecified = OMX_FALSE;

        err = mOMXNode->setConfig(
                (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering,
                &layerConfig, sizeof(layerConfig));
    } else {
        layerParams.ePattern = pattern;
        layerParams.nPLayerCountActual = numLayers - numBLayers;
        layerParams.nBLayerCountActual = numBLayers;
        layerParams.bBitrateRatiosSpecified = OMX_FALSE;

        err = mOMXNode->setParameter(
                (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
                &layerParams, sizeof(layerParams));
    }

    AString configSchema;
    if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) {
        configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers);
    } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) {
        configSchema = AStringPrintf("webrtc.vp8.%u", numLayers);
    }

    if (err != OK) {
        ALOGW("Failed to set temporal layers to %s (requested %s)",
                configSchema.c_str(), tsSchema.c_str());
        return err;
    }

    err = mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
            &layerParams, sizeof(layerParams));

    if (err == OK) {
        ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)",
                tsSchema.c_str(), configSchema.c_str(),
                asString(layerParams.ePattern), layerParams.ePattern,
                layerParams.nPLayerCountActual, layerParams.nBLayerCountActual);

        if (outputFormat.get() == mOutputFormat.get()) {
            mOutputFormat = mOutputFormat->dup(); // trigger an output format change event
        }
        // assume we got what we configured
        outputFormat->setString("ts-schema", configSchema);
    }
    return err;
}

// Raises the port's buffer size to at least `size` bytes (never shrinks it),
// then reads the value back to verify the component honored the request
// (continues below).
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize >= size) {
        return OK;
    }

    def.nBufferSize = size;

    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // Read back: some components silently ignore the new size.
    err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize < size) {
        ALOGE("failed to set min buffer size to %zu (is still %u)",
                size, def.nBufferSize);
        return FAILED_TRANSACTION;
    }

    return OK;
}

// Walks the port's supported audio codings (up to kMaxIndicesToCheck entries)
// until desiredFormat is found, then selects it via setParameter. Returns
// ERROR_UNSUPPORTED if the format is not in the component's list.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMXNode->getParameter(
                OMX_IndexParamAudioPortFormat, &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            break;
        }

        if (index == kMaxIndicesToCheck) {
            // Bail out rather than enumerate a buggy component forever.
            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eEncoding), format.eEncoding);
            return ERROR_UNSUPPORTED;
        }
    }

    return mOMXNode->setParameter(
            OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}

// Configures the component for AAC. For encoders: selects AAC coding on the
// output port, then fills in channel mode, rate, bitrate, profile and SBR
// tool flags. For decoders: sets stream format (ADTS vs MP4FF), and
// best-effort applies the Android AAC presentation (DRC / channel-limit)
// parameters. ADTS is input-only, so encoder+isADTS is rejected up front.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        return -EINVAL;
    }

    // The PCM side of the codec (encoder input / decoder output) is raw audio.
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMXNode->getParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMXNode->setParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMXNode->getParameter(
                OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // sbrMode: 0 = off, 1 = single-rate, 2 = dual-rate, -1 = codec decides.
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMXNode->setParameter(
                OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // Decoder path: configure the compressed input port.
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMXNode->getParameter(
            OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // Presentation parameters (DRC, output channel limit, PCM limiter);
    // negative values mean "unknown / leave at component default".
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMXNode->setParameter(
            OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMXNode->setParameter(
                (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    mSampleRate = sampleRate;
    return res;
}

// Configures the component for AC3 decode (encoding unsupported):
// raw PCM on the output port, AC3 parameters on the input port.
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ?
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2702 2703 if (err != OK) { 2704 return err; 2705 } 2706 2707 if (encoder) { 2708 ALOGW("AC3 encoding is not supported."); 2709 return INVALID_OPERATION; 2710 } 2711 2712 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2713 InitOMXParams(&def); 2714 def.nPortIndex = kPortIndexInput; 2715 2716 err = mOMXNode->getParameter( 2717 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, &def, sizeof(def)); 2718 2719 if (err != OK) { 2720 return err; 2721 } 2722 2723 def.nChannels = numChannels; 2724 def.nSampleRate = sampleRate; 2725 2726 return mOMXNode->setParameter( 2727 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, &def, sizeof(def)); 2728} 2729 2730status_t ACodec::setupEAC3Codec( 2731 bool encoder, int32_t numChannels, int32_t sampleRate) { 2732 status_t err = setupRawAudioFormat( 2733 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2734 2735 if (err != OK) { 2736 return err; 2737 } 2738 2739 if (encoder) { 2740 ALOGW("EAC3 encoding is not supported."); 2741 return INVALID_OPERATION; 2742 } 2743 2744 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2745 InitOMXParams(&def); 2746 def.nPortIndex = kPortIndexInput; 2747 2748 err = mOMXNode->getParameter( 2749 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, &def, sizeof(def)); 2750 2751 if (err != OK) { 2752 return err; 2753 } 2754 2755 def.nChannels = numChannels; 2756 def.nSampleRate = sampleRate; 2757 2758 return mOMXNode->setParameter( 2759 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, &def, sizeof(def)); 2760} 2761 2762static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2763 bool isAMRWB, int32_t bps) { 2764 if (isAMRWB) { 2765 if (bps <= 6600) { 2766 return OMX_AUDIO_AMRBandModeWB0; 2767 } else if (bps <= 8850) { 2768 return OMX_AUDIO_AMRBandModeWB1; 2769 } else if (bps <= 12650) { 2770 return OMX_AUDIO_AMRBandModeWB2; 2771 } else if (bps <= 14250) { 2772 return OMX_AUDIO_AMRBandModeWB3; 2773 } else if (bps <= 15850) { 2774 return OMX_AUDIO_AMRBandModeWB4; 
        } else if (bps <= 18250) {
            return OMX_AUDIO_AMRBandModeWB5;
        } else if (bps <= 19850) {
            return OMX_AUDIO_AMRBandModeWB6;
        } else if (bps <= 23050) {
            return OMX_AUDIO_AMRBandModeWB7;
        }

        // 23850 bps
        return OMX_AUDIO_AMRBandModeWB8;
    } else {  // AMRNB
        if (bps <= 4750) {
            return OMX_AUDIO_AMRBandModeNB0;
        } else if (bps <= 5150) {
            return OMX_AUDIO_AMRBandModeNB1;
        } else if (bps <= 5900) {
            return OMX_AUDIO_AMRBandModeNB2;
        } else if (bps <= 6700) {
            return OMX_AUDIO_AMRBandModeNB3;
        } else if (bps <= 7400) {
            return OMX_AUDIO_AMRBandModeNB4;
        } else if (bps <= 7950) {
            return OMX_AUDIO_AMRBandModeNB5;
        } else if (bps <= 10200) {
            return OMX_AUDIO_AMRBandModeNB6;
        }

        // 12200 bps
        return OMX_AUDIO_AMRBandModeNB7;
    }
}

// Configures AMR (narrow- or wide-band) on the compressed port, picking the
// band mode from the requested bitrate, then sets the PCM side to the fixed
// AMR rates: mono at 16 kHz (WB) or 8 kHz (NB).
status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
    OMX_AUDIO_PARAM_AMRTYPE def;
    InitOMXParams(&def);
    // The compressed side: encoder output or decoder input.
    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);

    err = mOMXNode->setParameter(
            OMX_IndexParamAudioAmr, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            isWAMR ? 16000 : 8000 /* sampleRate */,
            1 /* numChannels */);
}

// Configures G.711 (a-law/mu-law) decode; encoding is not supported. The
// sample rate defaults to 8000 Hz at the caller when unspecified.
status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
    if (encoder) {
        return INVALID_OPERATION;
    }

    return setupRawAudioFormat(
            kPortIndexInput, sampleRate, numChannels);
}

// Configures FLAC: for encoders, applies the compression level (0..8) on the
// output port; for both directions, configures the PCM side as raw audio.
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMXNode->getParameter(OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMXNode->setParameter(OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures a port for raw PCM: sets the port's coding to PCM, then the PCM
// mode parameters (channels, rate, bit depth per `encoding`). If the codec
// rejects a non-16-bit encoding, retries with signed 16-bit; the caller
// verifies the result via readback.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMXNode->getParameter(
            OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // NOTE(review): OMX_ErrorNone (== 0 == OK) is returned as a status_t
        // here, silently skipping the PCM parameter update when no channel
        // mapping exists for this channel count — confirm this is intentional.
        return OMX_ErrorNone;
    }

    err = mOMXNode->setParameter(
            OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err != OK && encoding != kAudioEncodingPcm16bit) {
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMXNode->setParameter(
                OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Enables OMX tunneled video playback (decoder renders directly, bypassing
// the app) and attaches the sideband stream handle returned by the component
// to the native window.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMXNode->configureVideoTunnelMode(
            kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Enumerates the port's supported video formats until one matches the
// requested compression/color-format pair (substituting a codec-native
// format when a flexible YUV equivalent was requested), then selects it
// (continues below).
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMXNode->getParameter(
                OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && IsFlexibleColorFormat(
                        mOMXNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
} 2995 2996 // The following assertion is violated by TI's video decoder. 2997 // CHECK_EQ(format.nIndex, index); 2998 2999 if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) { 3000 if (portIndex == kPortIndexInput 3001 && colorFormat == format.eColorFormat) { 3002 // eCompressionFormat does not seem right. 3003 found = true; 3004 break; 3005 } 3006 if (portIndex == kPortIndexOutput 3007 && compressionFormat == format.eCompressionFormat) { 3008 // eColorFormat does not seem right. 3009 found = true; 3010 break; 3011 } 3012 } 3013 3014 if (format.eCompressionFormat == compressionFormat 3015 && format.eColorFormat == colorFormat) { 3016 found = true; 3017 break; 3018 } 3019 3020 if (index == kMaxIndicesToCheck) { 3021 ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)", 3022 mComponentName.c_str(), index, 3023 asString(format.eCompressionFormat), format.eCompressionFormat, 3024 asString(format.eColorFormat), format.eColorFormat); 3025 } 3026 } 3027 3028 if (!found) { 3029 return UNKNOWN_ERROR; 3030 } 3031 3032 status_t err = mOMXNode->setParameter( 3033 OMX_IndexParamVideoPortFormat, &format, sizeof(format)); 3034 3035 return err; 3036} 3037 3038// Set optimal output format. OMX component lists output formats in the order 3039// of preference, but this got more complicated since the introduction of flexible 3040// YUV formats. We support a legacy behavior for applications that do not use 3041// surface output, do not specify an output format, but expect a "usable" standard 3042// OMX format. SW readable and standard formats must be flex-YUV. 
3043// 3044// Suggested preference order: 3045// - optimal format for texture rendering (mediaplayer behavior) 3046// - optimal SW readable & texture renderable format (flex-YUV support) 3047// - optimal SW readable non-renderable format (flex-YUV bytebuffer support) 3048// - legacy "usable" standard formats 3049// 3050// For legacy support, we prefer a standard format, but will settle for a SW readable 3051// flex-YUV format. 3052status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) { 3053 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat; 3054 InitOMXParams(&format); 3055 format.nPortIndex = kPortIndexOutput; 3056 3057 InitOMXParams(&legacyFormat); 3058 // this field will change when we find a suitable legacy format 3059 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused; 3060 3061 for (OMX_U32 index = 0; ; ++index) { 3062 format.nIndex = index; 3063 status_t err = mOMXNode->getParameter( 3064 OMX_IndexParamVideoPortFormat, &format, sizeof(format)); 3065 if (err != OK) { 3066 // no more formats, pick legacy format if found 3067 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) { 3068 memcpy(&format, &legacyFormat, sizeof(format)); 3069 break; 3070 } 3071 return err; 3072 } 3073 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) { 3074 return OMX_ErrorBadParameter; 3075 } 3076 if (!getLegacyFlexibleFormat) { 3077 break; 3078 } 3079 // standard formats that were exposed to users before 3080 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar 3081 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar 3082 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar 3083 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar 3084 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) { 3085 break; 3086 } 3087 // find best legacy non-standard format 3088 OMX_U32 flexibleEquivalent; 3089 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused 3090 && IsFlexibleColorFormat( 3091 mOMXNode, 
format.eColorFormat, false /* usingNativeBuffers */, 3092 &flexibleEquivalent) 3093 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 3094 memcpy(&legacyFormat, &format, sizeof(format)); 3095 } 3096 } 3097 return mOMXNode->setParameter( 3098 OMX_IndexParamVideoPortFormat, &format, sizeof(format)); 3099} 3100 3101static const struct VideoCodingMapEntry { 3102 const char *mMime; 3103 OMX_VIDEO_CODINGTYPE mVideoCodingType; 3104} kVideoCodingMapEntry[] = { 3105 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 3106 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 3107 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 3108 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 3109 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 3110 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 3111 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 3112 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 3113}; 3114 3115static status_t GetVideoCodingTypeFromMime( 3116 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 3117 for (size_t i = 0; 3118 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3119 ++i) { 3120 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3121 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3122 return OK; 3123 } 3124 } 3125 3126 *codingType = OMX_VIDEO_CodingUnused; 3127 3128 return ERROR_UNSUPPORTED; 3129} 3130 3131static status_t GetMimeTypeForVideoCoding( 3132 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3133 for (size_t i = 0; 3134 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3135 ++i) { 3136 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3137 *mime = kVideoCodingMapEntry[i].mMime; 3138 return OK; 3139 } 3140 } 3141 3142 mime->clear(); 3143 3144 return ERROR_UNSUPPORTED; 3145} 3146 3147status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) { 3148 OMX_PARAM_PORTDEFINITIONTYPE def; 3149 InitOMXParams(&def); 3150 
    def.nPortIndex = portIndex;
    status_t err;
    ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }
    def.nBufferCountActual = bufferNum;
    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        // Component could reject this request.
        ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(),
                portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    }
    // Rejection by the component is deliberately not propagated to the caller.
    return OK;
}

// Configures a video decoder from |msg|: picks input compression and output color
// formats, optional buffer counts, frame rate, port frame sizes, color aspects and
// HDR static info. Color-aspect/HDR support is optional (ERROR_UNSUPPORTED -> OK).
status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
        bool usingSwRenderer, sp<AMessage> &outputFormat) {
    int32_t width, height;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CODINGTYPE compressionFormat;
    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    if (compressionFormat == OMX_VIDEO_CodingVP9) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexInput;
        // Check if VP9 decoder advertises supported profiles.
        params.nProfileIndex = 0;
        status_t err = mOMXNode->getParameter(
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params, sizeof(params));
        // A decoder that cannot report profiles is treated as a legacy VP9 decoder.
        mIsLegacyVP9Decoder = err != OK;
    }

    err = setVideoPortFormatType(
            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        return err;
    }

    int32_t tmp;
    if (msg->findInt32("color-format", &tmp)) {
        OMX_COLOR_FORMATTYPE colorFormat =
            static_cast<OMX_COLOR_FORMATTYPE>(tmp);
        err = setVideoPortFormatType(
                kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
        if (err != OK) {
            // fall back to the component's preferred output format
            ALOGW("[%s] does not support color format %d",
                  mComponentName.c_str(), colorFormat);
            err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
        }
    } else {
        err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
    }

    if (err != OK) {
        return err;
    }

    // Set the component input buffer number to be |tmp|. If succeed,
    // component will set input port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-input-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexInput, tmp);
        if (err != OK)
            return err;
    }

    // Set the component output buffer number to be |tmp|. If succeed,
    // component will set output port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-output-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexOutput, tmp);
        if (err != OK)
            return err;
    }

    // frame-rate may be an int32 or a float; -1 means unspecified
    int32_t frameRateInt;
    float frameRateFloat;
    if (!msg->findFloat("frame-rate", &frameRateFloat)) {
        if (!msg->findInt32("frame-rate", &frameRateInt)) {
            frameRateInt = -1;
        }
        frameRateFloat = (float)frameRateInt;
    }

    err = setVideoFormatOnPort(
            kPortIndexInput, width, height, compressionFormat, frameRateFloat);

    if (err != OK) {
        return err;
    }

    err = setVideoFormatOnPort(
            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);

    if (err != OK) {
        return err;
    }

    err = setColorAspectsForVideoDecoder(
            width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }
    return err;
}

// Caches the vendor extension index used for color-aspect communication.
// On failure the index is zeroed, which disables aspect get/setConfig calls.
status_t ACodec::initDescribeColorAspectsIndex() {
    status_t err = mOMXNode->getExtensionIndex(
            "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
    if (err != OK) {
        mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes |params| color aspects to the codec (if the extension is supported) and,
// when |verify| is set, reads them back so the caller sees the codec's view.
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMXNode->setConfig(mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));

    if (verify && err == OK) {
        err = getCodecColorAspects(params);
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
            "[%s] setting color aspects failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Decoder path: seeds color aspects from the configure() format (defaulting them
// when rendering to a native window) and communicates them to the codec.
status_t ACodec::setColorAspectsForVideoDecoder(
        int32_t width, int32_t height, bool usingNativeWindow,
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (usingNativeWindow) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
        // The default aspects will be set back to the output format during the
        // getFormat phase of configure(). Set non-Unspecified values back into the
        // format, in case component does not support this enumeration.
        setColorAspectsIntoFormat(params.sAspects, outputFormat);
    }

    (void)initDescribeColorAspectsIndex();

    // communicate color aspects to codec
    return setCodecColorAspects(params);
}

// Reads the codec's current color aspects into |params| via the vendor extension;
// returns ERROR_UNSUPPORTED when the extension index is unavailable.
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMXNode->getConfig(mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}

// Copies the codec's input-port color aspects into |format| (encoder input side).
status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    status_t err = getCodecColorAspects(params);
    if (err == OK) {
        // we only set encoder input aspects if codec supports them
        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
    }
    return err;
}

// Determines the dataspace for |params|' aspects: first asks the codec (when
// |tryCodec|), then falls back to a legacy mapping from the aspects themselves.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
          "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}


// Decoder path: writes the configured aspects to the codec with read-back, stores
// the resulting aspects into |outputFormat| and (optionally) resolves a dataspace.
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}

// initial video encoder setup for bytebuffer mode
3431status_t ACodec::setColorAspectsForVideoEncoder( 3432 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3433 // copy config to output format as this is not exposed via getFormat 3434 copyColorConfig(configFormat, outputFormat); 3435 3436 DescribeColorAspectsParams params; 3437 InitOMXParams(¶ms); 3438 params.nPortIndex = kPortIndexInput; 3439 getColorAspectsFromFormat(configFormat, params.sAspects); 3440 3441 (void)initDescribeColorAspectsIndex(); 3442 3443 int32_t usingRecorder; 3444 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3445 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3446 int32_t width, height; 3447 if (configFormat->findInt32("width", &width) 3448 && configFormat->findInt32("height", &height)) { 3449 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3450 status_t err = getDataSpace( 3451 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3452 if (err != OK) { 3453 return err; 3454 } 3455 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3456 } 3457 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3458 } 3459 3460 // communicate color aspects to codec, but do not allow change of the platform aspects 3461 ColorAspects origAspects = params.sAspects; 3462 for (int triesLeft = 2; --triesLeft >= 0; ) { 3463 status_t err = setCodecColorAspects(params, true /* readBack */); 3464 if (err != OK 3465 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3466 params.sAspects, origAspects, true /* usePlatformAspects */)) { 3467 return err; 3468 } 3469 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3470 mComponentName.c_str()); 3471 } 3472 return OK; 3473} 3474 3475status_t ACodec::setHDRStaticInfoForVideoCodec( 3476 OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3477 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3478 3479 
DescribeHDRStaticInfoParams params; 3480 InitOMXParams(¶ms); 3481 params.nPortIndex = portIndex; 3482 3483 HDRStaticInfo *info = ¶ms.sInfo; 3484 if (getHDRStaticInfoFromFormat(configFormat, info)) { 3485 setHDRStaticInfoIntoFormat(params.sInfo, outputFormat); 3486 } 3487 3488 (void)initDescribeHDRStaticInfoIndex(); 3489 3490 // communicate HDR static Info to codec 3491 return setHDRStaticInfo(params); 3492} 3493 3494// subsequent initial video encoder setup for surface mode 3495status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3496 android_dataspace *dataSpace /* nonnull */) { 3497 DescribeColorAspectsParams params; 3498 InitOMXParams(¶ms); 3499 params.nPortIndex = kPortIndexInput; 3500 ColorAspects &aspects = params.sAspects; 3501 3502 // reset default format and store resulting format into both input and output formats 3503 getColorAspectsFromFormat(mConfigFormat, aspects); 3504 int32_t width, height; 3505 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3506 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3507 } 3508 setColorAspectsIntoFormat(aspects, mInputFormat); 3509 setColorAspectsIntoFormat(aspects, mOutputFormat); 3510 3511 // communicate color aspects to codec, but do not allow any change 3512 ColorAspects origAspects = aspects; 3513 status_t err = OK; 3514 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3515 status_t err = setCodecColorAspects(params, true /* readBack */); 3516 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3517 break; 3518 } 3519 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3520 mComponentName.c_str()); 3521 } 3522 3523 *dataSpace = HAL_DATASPACE_BT709; 3524 aspects = origAspects; // restore desired color aspects 3525 status_t res = getDataSpace( 3526 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3527 if (err == 
            OK) {
        err = res;
    }
    mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
    mInputFormat->setBuffer(
            "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));

    // update input format with codec supported color aspects (basically set unsupported
    // aspects to Unspecified)
    if (err == OK) {
        (void)getInputColorAspectsForVideoEncoder(mInputFormat);
    }

    ALOGV("set default color aspects, updated input format to %s, output format to %s",
            mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());

    return err;
}

// Reads the codec's HDR static info for |portIndex| into |format| when supported.
status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    status_t err = getHDRStaticInfo(params);
    if (err == OK) {
        // we only set decodec output HDRStaticInfo if codec supports them
        setHDRStaticInfoIntoFormat(params.sInfo, format);
    }
    return err;
}

// Caches the vendor extension index for HDR static info; zero disables it.
status_t ACodec::initDescribeHDRStaticInfoIndex() {
    status_t err = mOMXNode->getExtensionIndex(
            "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
    if (err != OK) {
        mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes HDR static info to the codec via the vendor extension (if available).
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMXNode->setConfig(mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    const HDRStaticInfo *info = &params.sInfo;
    ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, "
            "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
            mComponentName.c_str(),
            info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y,
            info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
            info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
            info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] setting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Reads HDR static info from the codec via the vendor extension (if available).
status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMXNode->getConfig(mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] getting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}

// Configures a video encoder from |msg|: input port (color format, size, stride,
// frame rate), output port (compression format, bitrate), optional intra refresh,
// codec-specific parameters, color aspects and HDR static info (both optional).
status_t ACodec::setupVideoEncoder(
        const char *mime, const sp<AMessage> &msg,
        sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    int32_t tmp;
    if (!msg->findInt32("color-format", &tmp)) {
        return INVALID_OPERATION;
    }

    OMX_COLOR_FORMATTYPE colorFormat =
        static_cast<OMX_COLOR_FORMATTYPE>(tmp);

    status_t err = setVideoPortFormatType(
            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);

    if (err != OK) {
        ALOGE("[%s] does not support color format %d",
              mComponentName.c_str(), colorFormat);

        return err;
    }

    /* Input port configuration */

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    def.nPortIndex = kPortIndexInput;

    err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    int32_t width, height, bitrate;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)
            || !msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    int32_t stride;
    if (!msg->findInt32("stride", &stride)) {
        stride = width;
    }

    video_def->nStride = stride;

    int32_t sliceHeight;
    if (!msg->findInt32("slice-height", &sliceHeight)) {
        sliceHeight = height;
    }

    video_def->nSliceHeight = sliceHeight;

    // YUV 4:2:0 input: 1.5 bytes per pixel of the padded (stride x sliceHeight) plane
    def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
        // NOTE(review): mTimePerFrameUs is only updated when "frame-rate" arrives
        // as an int32; a float "frame-rate" leaves it unchanged — confirm intended.
        mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
    }

    // xFramerate is in OMX Q16 fixed point
    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
    // this is redundant as it was already set up in setVideoPortFormatType
    // FIXME for now skip this only for flexible YUV formats
    if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
        video_def->eColorFormat = colorFormat;
    }

    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set input port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    /* Output port configuration */

    OMX_VIDEO_CODINGTYPE compressionFormat;
    err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        ALOGE("[%s] does not support compression format %d",
             mComponentName.c_str(), compressionFormat);

        return err;
    }

    def.nPortIndex = kPortIndexOutput;

    err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;
    video_def->xFramerate = 0;
    video_def->nBitrate = bitrate;
    video_def->eCompressionFormat = compressionFormat;
    video_def->eColorFormat = OMX_COLOR_FormatUnused;

    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set output port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    int32_t intraRefreshPeriod = 0;
    if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod >= 0) {
        err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    // codec-standard specific encoder parameters
    switch (compressionFormat) {
        case OMX_VIDEO_CodingMPEG4:
            err = setupMPEG4EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingH263:
            err = setupH263EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingAVC:
            err = setupAVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingHEVC:
            err = setupHEVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            err = setupVPXEncoderParameters(msg, outputFormat);
            break;

        default:
            break;
    }

    if (err != OK) {
        return err;
    }

    // Set up color aspects on input, but propagate them to the output format, as they will
    // not be read back from encoder.
    err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
    if (err == ERROR_UNSUPPORTED) {
        ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    switch (compressionFormat) {
        case OMX_VIDEO_CodingAVC:
        case OMX_VIDEO_CodingHEVC:
            err = configureTemporalLayers(msg, true /* inConfigure */, outputFormat);
            if (err != OK) {
                err = OK; // ignore failure
            }
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            // TODO: do we need to support android.generic layering? webrtc layering is
            // already set up in setupVPXEncoderParameters.
            break;

        default:
            break;
    }

    if (err == OK) {
        ALOGI("setupVideoEncoder succeeded");
    }

    return err;
}

// Configures cyclic/adaptive intra macroblock refresh on the output port from the
// message keys "intra-refresh-CIR-mbs" / "intra-refresh-AIR-mbs" / "intra-refresh-AIR-ref".
status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nCirMBs = mbs;
    }

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nAirMBs = mbs;

        int32_t ref;
        if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
            return INVALID_OPERATION;
        }
        params.nAirRef = ref;
    }

    status_t err = mOMXNode->setParameter(
            OMX_IndexParamVideoIntraRefresh, &params, sizeof(params));
    return err;
}

// Converts an I-frame interval in seconds into the OMX nPFrames value (number of
// P frames between I frames), accounting for B frames between I/P frames.
static OMX_U32 setPFramesSpacing(
        float iFramesInterval /* seconds */, int32_t frameRate, uint32_t BFramesSpacing = 0) {
    // BFramesSpacing is the number of B frames between I/P frames
    // PFramesSpacing (the value to be returned) is the number of P frames between I frames
    //
    // keyFrameInterval = ((PFramesSpacing + 1) * BFramesSpacing) + PFramesSpacing + 1
    //                                     ^^^                        ^^^          ^^^
    //                              number of B frames         number of P      I frame
    //
    //                  = (PFramesSpacing + 1) * (BFramesSpacing + 1)
    //
    // E.g.
3875 // I P I : I-interval: 8, nPFrames 1, nBFrames 3 3876 // BBB BBB 3877 3878 if (iFramesInterval < 0) { // just 1 key frame 3879 return 0xFFFFFFFE; // don't use maxint as key-frame-interval calculation will add 1 3880 } else if (iFramesInterval == 0) { // just key frames 3881 return 0; 3882 } 3883 3884 // round down as key-frame-interval is an upper limit 3885 uint32_t keyFrameInterval = uint32_t(frameRate * iFramesInterval); 3886 OMX_U32 ret = keyFrameInterval / (BFramesSpacing + 1); 3887 return ret > 0 ? ret - 1 : 0; 3888} 3889 3890static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 3891 int32_t tmp; 3892 if (!msg->findInt32("bitrate-mode", &tmp)) { 3893 return OMX_Video_ControlRateVariable; 3894 } 3895 3896 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 3897} 3898 3899status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 3900 int32_t bitrate; 3901 float iFrameInterval; 3902 if (!msg->findInt32("bitrate", &bitrate) 3903 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) { 3904 return INVALID_OPERATION; 3905 } 3906 3907 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3908 3909 float frameRate; 3910 if (!msg->findFloat("frame-rate", &frameRate)) { 3911 int32_t tmp; 3912 if (!msg->findInt32("frame-rate", &tmp)) { 3913 return INVALID_OPERATION; 3914 } 3915 frameRate = (float)tmp; 3916 } 3917 3918 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 3919 InitOMXParams(&mpeg4type); 3920 mpeg4type.nPortIndex = kPortIndexOutput; 3921 3922 status_t err = mOMXNode->getParameter( 3923 OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3924 3925 if (err != OK) { 3926 return err; 3927 } 3928 3929 mpeg4type.nSliceHeaderSpacing = 0; 3930 mpeg4type.bSVH = OMX_FALSE; 3931 mpeg4type.bGov = OMX_FALSE; 3932 3933 mpeg4type.nAllowedPictureTypes = 3934 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3935 3936 mpeg4type.nBFrames = 0; 3937 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, mpeg4type.nBFrames); 
    // (continuation of setupMPEG4EncoderParameters from the previous chunk)
    if (mpeg4type.nPFrames == 0) {
        // No P frames at all -> I-frame-only stream.
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    // Profile/level are optional, but if a profile is given a level is required
    // and the pair must be advertised by the component.
    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMXNode->setParameter(
            OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// Configures the H.263 video encoder. Mirrors setupMPEG4EncoderParameters:
// GOP structure, optional profile/level, bitrate, then error correction.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as float or int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    // No B frames; derive P-frame spacing from the requested I-frame interval.
    h263type.nBFrames = 0;
    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h263type.nBFrames);
    if (h263type.nPFrames == 0) {
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    // Stick to baseline H.263 features.
    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMXNode->setParameter(
            OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// static
// Picks the minimum AVC level that can carry the given width/height/frame-rate/
// bitrate combination. Bitrate is first normalized to a main/baseline-profile
// kbps equivalent, then size and rate are converted to macroblock units and
// checked against the per-level limits table below. Returns 0 if no level fits.
int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
        int width, int height, int rate, int bitrate,
        OMX_VIDEO_AVCPROFILETYPE profile) {
    // convert bitrate to main/baseline profile kbps equivalent
    switch (profile) {
        case OMX_VIDEO_AVCProfileHigh10:
            bitrate = divUp(bitrate, 3000); break;
        case OMX_VIDEO_AVCProfileHigh:
            bitrate = divUp(bitrate, 1250); break;
        default:
            bitrate = divUp(bitrate, 1000); break;
    }

    // convert size and rate to MBs
    width = divUp(width, 16);
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;                    // rate is now macroblocks per second
    int maxDimension = max(width, height);

    static const int limits[][5] = {
        /*    MBps      MB   dim  bitrate             level */
        {     1485,     99,   28,     64, OMX_VIDEO_AVCLevel1  },
        {     1485,     99,   28,    128, OMX_VIDEO_AVCLevel1b },
        {     3000,    396,   56,    192, OMX_VIDEO_AVCLevel11 },
        {     6000,    396,   56,    384, OMX_VIDEO_AVCLevel12 },
        {    11880,    396,   56,    768, OMX_VIDEO_AVCLevel13 },
        {    11880,    396,   56,   2000, OMX_VIDEO_AVCLevel2  },
        {    19800,    792,   79,   4000, OMX_VIDEO_AVCLevel21 },
        {    20250,   1620,  113,   4000, OMX_VIDEO_AVCLevel22 },
        {    40500,   1620,  113,  10000, OMX_VIDEO_AVCLevel3  },
        {   108000,   3600,  169,  14000, OMX_VIDEO_AVCLevel31 },
        {   216000,   5120,  202,  20000, OMX_VIDEO_AVCLevel32 },
        {   245760,   8192,  256,  20000, OMX_VIDEO_AVCLevel4  },
        {   245760,   8192,  256,  50000, OMX_VIDEO_AVCLevel41 },
        {   522240,   8704,  263,  50000, OMX_VIDEO_AVCLevel42 },
        {   589824,  22080,  420, 135000, OMX_VIDEO_AVCLevel5  },
        {   983040,  36864,  543, 240000, OMX_VIDEO_AVCLevel51 },
        {  2073600,  36864,  543, 240000, OMX_VIDEO_AVCLevel52 },
    };

    // Table is ordered by increasing level; return the first one that fits.
    for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
        const int (&limit)[5] = limits[i];
        if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2]
                && bitrate <= limit[3]) {
            return limit[4];
        }
    }
    return 0;
}

// Configures the AVC (H.264) video encoder: GOP structure, profile/level,
// optional intra-refresh and temporal layering, then bitrate.
// (Function continues in the next chunk.)
status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as float or int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    status_t err = OK;
    int32_t intraRefreshMode = 0;
    if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) {
        err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode);
        if (err != OK) {
ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4135 err, intraRefreshMode); 4136 return err; 4137 } 4138 } 4139 4140 OMX_VIDEO_PARAM_AVCTYPE h264type; 4141 InitOMXParams(&h264type); 4142 h264type.nPortIndex = kPortIndexOutput; 4143 4144 err = mOMXNode->getParameter( 4145 OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4146 4147 if (err != OK) { 4148 return err; 4149 } 4150 4151 h264type.nAllowedPictureTypes = 4152 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4153 4154 int32_t profile; 4155 if (msg->findInt32("profile", &profile)) { 4156 int32_t level; 4157 if (!msg->findInt32("level", &level)) { 4158 return INVALID_OPERATION; 4159 } 4160 4161 err = verifySupportForProfileAndLevel(profile, level); 4162 4163 if (err != OK) { 4164 return err; 4165 } 4166 4167 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4168 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4169 } else { 4170 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4171#if 0 /* DON'T YET DEFAULT TO HIGHEST PROFILE */ 4172 // Use largest supported profile for AVC recording if profile is not specified. 
4173 for (OMX_VIDEO_AVCPROFILETYPE profile : { 4174 OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCProfileMain }) { 4175 if (verifySupportForProfileAndLevel(profile, 0) == OK) { 4176 h264type.eProfile = profile; 4177 break; 4178 } 4179 } 4180#endif 4181 } 4182 4183 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4184 asString(h264type.eProfile), asString(h264type.eLevel)); 4185 4186 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4187 h264type.nSliceHeaderSpacing = 0; 4188 h264type.bUseHadamard = OMX_TRUE; 4189 h264type.nRefFrames = 1; 4190 h264type.nBFrames = 0; 4191 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4192 if (h264type.nPFrames == 0) { 4193 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4194 } 4195 h264type.nRefIdx10ActiveMinus1 = 0; 4196 h264type.nRefIdx11ActiveMinus1 = 0; 4197 h264type.bEntropyCodingCABAC = OMX_FALSE; 4198 h264type.bWeightedPPrediction = OMX_FALSE; 4199 h264type.bconstIpred = OMX_FALSE; 4200 h264type.bDirect8x8Inference = OMX_FALSE; 4201 h264type.bDirectSpatialTemporal = OMX_FALSE; 4202 h264type.nCabacInitIdc = 0; 4203 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4204 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4205 h264type.nSliceHeaderSpacing = 0; 4206 h264type.bUseHadamard = OMX_TRUE; 4207 h264type.nRefFrames = 2; 4208 h264type.nBFrames = 1; 4209 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4210 h264type.nAllowedPictureTypes = 4211 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4212 h264type.nRefIdx10ActiveMinus1 = 0; 4213 h264type.nRefIdx11ActiveMinus1 = 0; 4214 h264type.bEntropyCodingCABAC = OMX_TRUE; 4215 h264type.bWeightedPPrediction = OMX_TRUE; 4216 h264type.bconstIpred = OMX_TRUE; 4217 h264type.bDirect8x8Inference = OMX_TRUE; 4218 h264type.bDirectSpatialTemporal = OMX_TRUE; 4219 h264type.nCabacInitIdc = 1; 4220 } 4221 4222 if (h264type.nBFrames != 0) { 4223 
h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4224 } 4225 4226 h264type.bEnableUEP = OMX_FALSE; 4227 h264type.bEnableFMO = OMX_FALSE; 4228 h264type.bEnableASO = OMX_FALSE; 4229 h264type.bEnableRS = OMX_FALSE; 4230 h264type.bFrameMBsOnly = OMX_TRUE; 4231 h264type.bMBAFF = OMX_FALSE; 4232 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4233 4234 err = mOMXNode->setParameter( 4235 OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4236 4237 if (err != OK) { 4238 return err; 4239 } 4240 4241 // TRICKY: if we are enabling temporal layering as well, some codecs may not support layering 4242 // when B-frames are enabled. Detect this now so we can disable B frames if temporal layering 4243 // is preferred. 4244 AString tsSchema; 4245 int32_t preferBFrames = (int32_t)false; 4246 if (msg->findString("ts-schema", &tsSchema) 4247 && (!msg->findInt32("android._prefer-b-frames", &preferBFrames) || !preferBFrames)) { 4248 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layering; 4249 InitOMXParams(&layering); 4250 layering.nPortIndex = kPortIndexOutput; 4251 if (mOMXNode->getParameter( 4252 (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 4253 &layering, sizeof(layering)) == OK 4254 && layering.eSupportedPatterns 4255 && layering.nBLayerCountMax == 0) { 4256 h264type.nBFrames = 0; 4257 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4258 h264type.nAllowedPictureTypes &= ~OMX_VIDEO_PictureTypeB; 4259 ALOGI("disabling B-frames"); 4260 err = mOMXNode->setParameter( 4261 OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4262 4263 if (err != OK) { 4264 return err; 4265 } 4266 } 4267 } 4268 4269 return configureBitrate(bitrate, bitrateMode); 4270} 4271 4272status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4273 int32_t bitrate; 4274 float iFrameInterval; 4275 if (!msg->findInt32("bitrate", &bitrate) 4276 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) { 4277 return INVALID_OPERATION; 
4278 } 4279 4280 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4281 4282 float frameRate; 4283 if (!msg->findFloat("frame-rate", &frameRate)) { 4284 int32_t tmp; 4285 if (!msg->findInt32("frame-rate", &tmp)) { 4286 return INVALID_OPERATION; 4287 } 4288 frameRate = (float)tmp; 4289 } 4290 4291 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4292 InitOMXParams(&hevcType); 4293 hevcType.nPortIndex = kPortIndexOutput; 4294 4295 status_t err = OK; 4296 err = mOMXNode->getParameter( 4297 (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4298 if (err != OK) { 4299 return err; 4300 } 4301 4302 int32_t profile; 4303 if (msg->findInt32("profile", &profile)) { 4304 int32_t level; 4305 if (!msg->findInt32("level", &level)) { 4306 return INVALID_OPERATION; 4307 } 4308 4309 err = verifySupportForProfileAndLevel(profile, level); 4310 if (err != OK) { 4311 return err; 4312 } 4313 4314 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4315 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4316 } 4317 // TODO: finer control? 
4318 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1; 4319 4320 err = mOMXNode->setParameter( 4321 (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4322 if (err != OK) { 4323 return err; 4324 } 4325 4326 return configureBitrate(bitrate, bitrateMode); 4327} 4328 4329status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg, sp<AMessage> &outputFormat) { 4330 int32_t bitrate; 4331 float iFrameInterval = 0; 4332 size_t tsLayers = 0; 4333 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4334 OMX_VIDEO_VPXTemporalLayerPatternNone; 4335 static const uint32_t kVp8LayerRateAlloction 4336 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4337 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4338 {100, 100, 100}, // 1 layer 4339 { 60, 100, 100}, // 2 layers {60%, 40%} 4340 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4341 }; 4342 if (!msg->findInt32("bitrate", &bitrate)) { 4343 return INVALID_OPERATION; 4344 } 4345 msg->findAsFloat("i-frame-interval", &iFrameInterval); 4346 4347 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4348 4349 float frameRate; 4350 if (!msg->findFloat("frame-rate", &frameRate)) { 4351 int32_t tmp; 4352 if (!msg->findInt32("frame-rate", &tmp)) { 4353 return INVALID_OPERATION; 4354 } 4355 frameRate = (float)tmp; 4356 } 4357 4358 AString tsSchema; 4359 OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE tsType = 4360 OMX_VIDEO_AndroidTemporalLayeringPatternNone; 4361 4362 if (msg->findString("ts-schema", &tsSchema)) { 4363 unsigned int numLayers = 0; 4364 unsigned int numBLayers = 0; 4365 int tags; 4366 char dummy; 4367 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1 4368 && numLayers > 0) { 4369 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4370 tsType = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC; 4371 tsLayers = numLayers; 4372 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c", 4373 &numLayers, &dummy, &numBLayers, &dummy)) 4374 && 
(tags == 1 || (tags == 3 && dummy == '+')) 4375 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 4376 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4377 // VPX does not have a concept of B-frames, so just count all layers 4378 tsType = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid; 4379 tsLayers = numLayers + numBLayers; 4380 } else { 4381 ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 4382 } 4383 tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS); 4384 } 4385 4386 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4387 InitOMXParams(&vp8type); 4388 vp8type.nPortIndex = kPortIndexOutput; 4389 status_t err = mOMXNode->getParameter( 4390 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4391 &vp8type, sizeof(vp8type)); 4392 4393 if (err == OK) { 4394 if (iFrameInterval > 0) { 4395 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1; 4396 } 4397 vp8type.eTemporalPattern = pattern; 4398 vp8type.nTemporalLayerCount = tsLayers; 4399 if (tsLayers > 0) { 4400 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4401 vp8type.nTemporalLayerBitrateRatio[i] = 4402 kVp8LayerRateAlloction[tsLayers - 1][i]; 4403 } 4404 } 4405 if (bitrateMode == OMX_Video_ControlRateConstant) { 4406 vp8type.nMinQuantizer = 2; 4407 vp8type.nMaxQuantizer = 63; 4408 } 4409 4410 err = mOMXNode->setParameter( 4411 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4412 &vp8type, sizeof(vp8type)); 4413 if (err != OK) { 4414 ALOGW("Extended VP8 parameters set failed: %d", err); 4415 } else if (tsType == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) { 4416 // advertise even single layer WebRTC layering, as it is defined 4417 outputFormat->setString("ts-schema", AStringPrintf("webrtc.vp8.%u-layer", tsLayers)); 4418 } else if (tsLayers > 0) { 4419 // tsType == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid 4420 outputFormat->setString("ts-schema", AStringPrintf("android.generic.%u", tsLayers)); 4421 } 4422 
    // (continuation of setupVPXEncoderParameters from the previous chunk)
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Checks whether the component advertises the given profile with a level at
// least `level`, by enumerating OMX_IndexParamVideoProfileLevelQuerySupported.
// Enumeration stops after kMaxIndicesToCheck entries (with a warning) so a
// buggy component cannot stall us. Returns OK on a match, ERROR_UNSUPPORTED
// if none was found, or the component's error.
status_t ACodec::verifySupportForProfileAndLevel(
        int32_t profile, int32_t level) {
    OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        params.nProfileIndex = index;
        status_t err = mOMXNode->getParameter(
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params, sizeof(params));

        if (err != OK) {
            // Components return an error once the index runs past the last
            // supported entry; treat any error as "not found".
            return err;
        }

        int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
        int32_t supportedLevel = static_cast<int32_t>(params.eLevel);

        if (profile == supportedProfile && level <= supportedLevel) {
            return OK;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                    mComponentName.c_str(), index,
                    params.eProfile, params.eLevel);
        }
    }
    return ERROR_UNSUPPORTED;
}

// Applies the target bitrate and rate-control mode to the output port.
status_t ACodec::configureBitrate(
        int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
    InitOMXParams(&bitrateType);
    bitrateType.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType));

    if (err != OK) {
        return err;
    }

    bitrateType.eControlRate = bitrateMode;
    bitrateType.nTargetBitrate = bitrate;

    return mOMXNode->setParameter(
            OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType));
}

// Enables resync markers (spacing 256) on the output port; HEC, data
// partitioning and RVLC stay off. Error correction is an optional component
// feature, so a failed getParameter is deliberately ignored.
status_t ACodec::setupErrorCorrectionParameters() {
    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
    InitOMXParams(&errorCorrectionType);
    errorCorrectionType.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));

    if (err != OK) {
        return OK;  // Optional feature. Ignore this failure
    }

    errorCorrectionType.bEnableHEC = OMX_FALSE;
    errorCorrectionType.bEnableResync = OMX_TRUE;
    errorCorrectionType.nResynchMarkerSpacing = 256;
    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
    errorCorrectionType.bEnableRVLC = OMX_FALSE;

    return mOMXNode->setParameter(
            OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));
}

// Applies frame size (and, on the input port, compression format and frame
// rate) to a video port's definition. (Function continues in the next chunk.)
status_t ACodec::setVideoFormatOnPort(
        OMX_U32 portIndex,
        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
        float frameRate) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (portIndex == kPortIndexInput) {
        // XXX Need a (much) better heuristic to compute input buffer sizes.
        // Enforce a 64KiB minimum input buffer size (heuristic, see XXX above).
        const size_t X = 64 * 1024;
        if (def.nBufferSize < X) {
            def.nBufferSize = X;
        }
    }

    if (def.eDomain != OMX_PortDomainVideo) {
        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
        return FAILED_TRANSACTION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    if (portIndex == kPortIndexInput) {
        video_def->eCompressionFormat = compressionFormat;
        video_def->eColorFormat = OMX_COLOR_FormatUnused;
        if (frameRate >= 0) {
            // xFramerate is in Q16 fixed point.
            video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
        }
    }

    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    return err;
}

// Tells the component whether output will use native (graphic) buffers,
// depending on whether a native window is attached.
status_t ACodec::initNativeWindow() {
    if (mNativeWindow != NULL) {
        return mOMXNode->enableNativeBuffers(kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
    }

    mOMXNode->enableNativeBuffers(kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
    return OK;
}

// Counts buffers on `portIndex` currently held by the OMX component.
size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            ++n;
        }
    }

    return n;
}

// Counts output buffers currently held by the native window (queued/dequeued
// on the Surface side).
size_t ACodec::countBuffersOwnedByNativeWindow() const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ++n;
        }
    }

    return n;
}

// Dequeues buffers back from the native window until only the minimum
// undequeued count remains there (or dequeueing fails).
void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
    if (mNativeWindow == NULL) {
        return;
    }

    while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
            && dequeueBufferFromNativeWindow() != NULL) {
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}

// True iff every buffer on the port is held by us or by the native window
// (i.e. none are with the OMX component or upstream/downstream).
bool ACodec::allYourBuffersAreBelongToUs(
        OMX_U32 portIndex) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mStatus != BufferInfo::OWNED_BY_US
                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("[%s] Buffer %u on port %u still has status %d",
                    mComponentName.c_str(),
                    info->mBufferID, portIndex, info->mStatus);
            return false;
        }
    }

    return true;
}

// Convenience overload: both ports.
bool ACodec::allYourBuffersAreBelongToUs() {
    return allYourBuffersAreBelongToUs(kPortIndexInput)
        && allYourBuffersAreBelongToUs(kPortIndexOutput);
}

// Queues a message to be re-delivered by processDeferredMessages().
void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}

// Re-delivers all deferred messages in order. The queue is swapped out first
// so messages deferred during processing land in the next round.
void ACodec::processDeferredMessages() {
    List<sp<AMessage> > queue = mDeferredQueue;
    mDeferredQueue.clear();

    List<sp<AMessage> >::iterator it = queue.begin();
    while (it != queue.end()) {
        onMessageReceived(*it++);
    }
}

// Populates `notify` with the media format of the given port, derived from the
// port definition plus codec-specific parameters (raw video geometry/crop/
// color aspects, compressed-video mime, or the per-codec audio parameters).
// Returns OK, or BAD_VALUE/BAD_TYPE/FAILED_TRANSACTION on inconsistent data.
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
    const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMXNode->getParameter(OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) {
        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
        return BAD_VALUE;
    }

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // Raw video: decoder output or encoder input only.
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    if (mNativeWindow == NULL) {
                        // Client reads buffers directly: attach the buffer
                        // layout (MediaImage2) so it can interpret the planes.
                        DescribeColorFormat2Params describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (DescribeColorFormat(mOMXNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        // Determine the crop rectangle; fall back to the full
                        // frame if the component doesn't report one.
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        if (mOMXNode->getConfig(
                                (portIndex == kPortIndexOutput ?
                                        OMX_IndexConfigCommonOutputCrop :
                                        OMX_IndexConfigCommonInputCrop),
                                &rect, sizeof(rect)) != OK) {
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        // Reject a crop rect that falls outside the frame.
                        if (rect.nLeft < 0 ||
                            rect.nTop < 0 ||
                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
                                    rect.nLeft, rect.nTop,
                                    rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                        (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
                    } else {
                        (void)getInputColorAspectsForVideoEncoder(notify);
                        if (mConfigFormat->contains("hdr-static-info")) {
                            (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
                        }
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    // Report the effective temporal-layering schema for VP8/VP9
                    // encoders, then fall through to the generic mime handling.
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        if (vp8type.eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC
                                && vp8type.nTemporalLayerCount > 0
                                && vp8type.nTemporalLayerCount
                                        <= OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) {
                            // advertise as android.generic if we configured for android.generic
                            AString origSchema;
                            if (notify->findString("ts-schema", &origSchema)
                                    && origSchema.startsWith("android.generic")) {
                                notify->setString("ts-schema", AStringPrintf(
                                        "android.generic.%u", vp8type.nTemporalLayerCount));
                            } else {
                                notify->setString("ts-schema", AStringPrintf(
                                        "webrtc.vp8.%u-layer", vp8type.nTemporalLayerCount));
                            }
                        }
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
                        // should be CodingUnused
                        ALOGE("Raw port video compression format is %s(%d)",
                                asString(videoDef->eCompressionFormat),
                                videoDef->eCompressionFormat);
                        return BAD_VALUE;
                    }
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                            videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    uint32_t intraRefreshPeriod = 0;
                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
                            && intraRefreshPeriod > 0) {
                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            // One case per supported audio coding; each queries the matching
            // OMX parameter struct and fills mime/channel-count/sample-rate.
            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // Only interleaved linear PCM is supported (mono may be
                    // reported non-interleaved).
                    if (params.nChannels <= 0
                            || (params.nChannels != 1 && !params.bInterleaved)
                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
                        ALOGE("unsupported PCM port: %u channels%s, %u-bit",
                                params.nChannels,
                                params.bInterleaved ? " interleaved" : "",
                                params.nBitPerSample);
                        return FAILED_TRANSACTION;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);

                    // Map (numerical-data, bits-per-sample) to a pcm encoding;
                    // anything other than u8 / f32 / s16 is rejected.
                    AudioEncoding encoding = kAudioEncodingPcm16bit;
                    if (params.eNumData == OMX_NumericalDataUnsigned
                            && params.nBitPerSample == 8u) {
                        encoding = kAudioEncodingPcm8bit;
                    } else if (params.eNumData == OMX_NumericalDataFloat
                            && params.nBitPerSample == 32u) {
                        encoding = kAudioEncodingPcmFloat;
                    } else if (params.nBitPerSample != 16u
                            || params.eNumData != OMX_NumericalDataSigned) {
                        ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ",
                                asString(params.eNumData), params.eNumData,
                                asString(params.ePCMMode), params.ePCMMode);
                        return FAILED_TRANSACTION;
                    }
                    notify->setInt32("pcm-encoding", encoding);

                    if (mChannelMaskPresent) {
                        notify->setInt32("channel-mask", mChannelMask);
                    }
                    break;
                }

                case OMX_AUDIO_CodingAAC:
                {
                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioAac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAMR:
                {
                    OMX_AUDIO_PARAM_AMRTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioAmr, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // AMR is always mono; band mode decides NB (8 kHz) vs WB (16 kHz).
                    notify->setInt32("channel-count", 1);
                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
                        notify->setInt32("sample-rate", 16000);
                    } else {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
                        notify->setInt32("sample-rate", 8000);
                    }
                    break;
                }

                case OMX_AUDIO_CodingFLAC:
                {
                    OMX_AUDIO_PARAM_FLACTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioFlac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingMP3:
                {
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioMp3, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingVORBIS:
                {
                    OMX_AUDIO_PARAM_VORBISTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioVorbis, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidEAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidOPUS:
                {
                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingG711:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // G.711 decoders report mu-law/a-law via ePCMMode; a linear
                    // mode means the port already carries raw PCM.
                    const char *mime = NULL;
                    if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
                    } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
                    } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
                        mime = MEDIA_MIMETYPE_AUDIO_RAW;
                    }
                    notify->setString("mime", mime);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);
                    notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit);
                    break;
                }

                case OMX_AUDIO_CodingGSMFR:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);
                    break;
                }

                default:
                    ALOGE("Unsupported audio coding: %s(%d)\n",
                            asString(audioDef->eEncoding), audioDef->eEncoding);
                    return BAD_TYPE;
            }
            break;
        }

        default:
            ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain);
            return BAD_TYPE;
    }

    return OK;
}

// Merges color aspects reported by the codec with the dataspace the surface
// reported, then publishes them via a duplicated output format.
// (Function is truncated at the end of this chunk.)
void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) {
    // aspects are normally communicated in ColorAspects
    int32_t range, standard, transfer;
    convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer);

    // if some aspects are unspecified, use dataspace fields
    // NOTE(review): the `!= 0` conditions below overwrite *specified* aspects
    // with the dataspace fields, which contradicts the comment above
    // (unspecified values are 0). Verify against the intended behavior before
    // changing — left as-is here.
    if (range != 0) {
        range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT;
    }
    if (standard != 0) {
        standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT;
    }
    if (transfer != 0) {
        transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT;
    }

    mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event
    if (range != 0) {
        mOutputFormat->setInt32("color-range", range);
    }
    if (standard != 0) {
        mOutputFormat->setInt32("color-standard", standard);
    }
    if (transfer != 0) {
        mOutputFormat->setInt32("color-transfer", transfer);
    }
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5109 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5110 dataSpace, 5111 aspects.mRange, asString(aspects.mRange), 5112 aspects.mPrimaries, asString(aspects.mPrimaries), 5113 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5114 aspects.mTransfer, asString(aspects.mTransfer), 5115 range, asString((ColorRange)range), 5116 standard, asString((ColorStandard)standard), 5117 transfer, asString((ColorTransfer)transfer)); 5118} 5119 5120void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5121 // store new output format, at the same time mark that this is no longer the first frame 5122 mOutputFormat = mBaseOutputFormat->dup(); 5123 5124 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5125 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5126 return; 5127 } 5128 5129 if (expectedFormat != NULL) { 5130 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5131 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5132 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5133 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5134 mComponentName.c_str(), 5135 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5136 } 5137 } 5138 5139 if (!mIsVideo && !mIsEncoder) { 5140 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5141 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5142 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5143 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5144 5145 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5146 if (mConverter[kPortIndexOutput] != NULL) { 5147 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5148 } 5149 } 5150 5151 if (mTunneled) { 5152 sendFormatChange(); 5153 } 5154} 5155 5156void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> ¬ify) { 5157 AString mime; 5158 CHECK(mOutputFormat->findString("mime", &mime)); 5159 5160 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) { 5161 // notify renderer of the crop change and dataspace change 5162 // NOTE: native window uses extended right-bottom coordinate 5163 int32_t left, top, right, bottom; 5164 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) { 5165 notify->setRect("crop", left, top, right + 1, bottom + 1); 5166 } 5167 5168 int32_t dataSpace; 5169 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) { 5170 notify->setInt32("dataspace", dataSpace); 5171 } 5172 } 5173} 5174 5175void ACodec::sendFormatChange() { 5176 AString mime; 5177 CHECK(mOutputFormat->findString("mime", &mime)); 5178 5179 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 5180 int32_t channelCount, sampleRate; 5181 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 5182 CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate)); 5183 if (mSampleRate != 0 && sampleRate != 0) { 5184 mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate; 5185 mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate; 5186 mSampleRate = sampleRate; 5187 } 5188 if (mSkipCutBuffer != NULL) { 5189 size_t prevbufsize = mSkipCutBuffer->size(); 5190 if (prevbufsize != 0) { 5191 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 5192 } 5193 } 5194 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 5195 } 5196 5197 sp<AMessage> notify = mNotify->dup(); 5198 notify->setInt32("what", kWhatOutputFormatChanged); 5199 notify->setMessage("format", mOutputFormat); 5200 notify->post(); 5201 5202 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 5203 mLastOutputFormat = mOutputFormat; 5204} 5205 5206void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 5207 
sp<AMessage> notify = mNotify->dup(); 5208 notify->setInt32("what", CodecBase::kWhatError); 5209 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 5210 5211 if (internalError == UNKNOWN_ERROR) { // find better error code 5212 const status_t omxStatus = statusFromOMXError(error); 5213 if (omxStatus != 0) { 5214 internalError = omxStatus; 5215 } else { 5216 ALOGW("Invalid OMX error %#x", error); 5217 } 5218 } 5219 5220 mFatalError = true; 5221 5222 notify->setInt32("err", internalError); 5223 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 5224 notify->post(); 5225} 5226 5227status_t ACodec::requestIDRFrame() { 5228 if (!mIsEncoder) { 5229 return ERROR_UNSUPPORTED; 5230 } 5231 5232 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5233 InitOMXParams(¶ms); 5234 5235 params.nPortIndex = kPortIndexOutput; 5236 params.IntraRefreshVOP = OMX_TRUE; 5237 5238 return mOMXNode->setConfig( 5239 OMX_IndexConfigVideoIntraVOPRefresh, 5240 ¶ms, 5241 sizeof(params)); 5242} 5243 5244//////////////////////////////////////////////////////////////////////////////// 5245 5246ACodec::PortDescription::PortDescription() { 5247} 5248 5249void ACodec::PortDescription::addBuffer( 5250 IOMX::buffer_id id, const sp<MediaCodecBuffer> &buffer, 5251 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) { 5252 mBufferIDs.push_back(id); 5253 mBuffers.push_back(buffer); 5254 mHandles.push_back(handle); 5255 mMemRefs.push_back(memRef); 5256} 5257 5258size_t ACodec::PortDescription::countBuffers() { 5259 return mBufferIDs.size(); 5260} 5261 5262IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5263 return mBufferIDs.itemAt(index); 5264} 5265 5266sp<MediaCodecBuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5267 return mBuffers.itemAt(index); 5268} 5269 5270sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const { 5271 return mHandles.itemAt(index); 5272} 5273 5274sp<RefBase> 
ACodec::PortDescription::memRefAt(size_t index) const { 5275 return mMemRefs.itemAt(index); 5276} 5277 5278//////////////////////////////////////////////////////////////////////////////// 5279 5280ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5281 : AState(parentState), 5282 mCodec(codec) { 5283} 5284 5285ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5286 OMX_U32 /* portIndex */) { 5287 return KEEP_BUFFERS; 5288} 5289 5290bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5291 switch (msg->what()) { 5292 case kWhatInputBufferFilled: 5293 { 5294 onInputBufferFilled(msg); 5295 break; 5296 } 5297 5298 case kWhatOutputBufferDrained: 5299 { 5300 onOutputBufferDrained(msg); 5301 break; 5302 } 5303 5304 case ACodec::kWhatOMXMessageList: 5305 { 5306 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5307 } 5308 5309 case ACodec::kWhatOMXMessageItem: 5310 { 5311 // no need to check as we already did it for kWhatOMXMessageList 5312 return onOMXMessage(msg); 5313 } 5314 5315 case ACodec::kWhatOMXMessage: 5316 { 5317 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5318 } 5319 5320 case ACodec::kWhatSetSurface: 5321 { 5322 sp<AReplyToken> replyID; 5323 CHECK(msg->senderAwaitsResponse(&replyID)); 5324 5325 sp<RefBase> obj; 5326 CHECK(msg->findObject("surface", &obj)); 5327 5328 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5329 5330 sp<AMessage> response = new AMessage; 5331 response->setInt32("err", err); 5332 response->postReply(replyID); 5333 break; 5334 } 5335 5336 case ACodec::kWhatCreateInputSurface: 5337 case ACodec::kWhatSetInputSurface: 5338 case ACodec::kWhatSignalEndOfInputStream: 5339 { 5340 // This may result in an app illegal state exception. 
5341 ALOGE("Message 0x%x was not handled", msg->what()); 5342 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5343 return true; 5344 } 5345 5346 case ACodec::kWhatOMXDied: 5347 { 5348 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5349 ALOGE("OMX/mediaserver died, signalling error!"); 5350 mCodec->mGraphicBufferSource.clear(); 5351 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5352 break; 5353 } 5354 5355 case ACodec::kWhatReleaseCodecInstance: 5356 { 5357 ALOGI("[%s] forcing the release of codec", 5358 mCodec->mComponentName.c_str()); 5359 status_t err = mCodec->mOMXNode->freeNode(); 5360 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5361 mCodec->mComponentName.c_str(), err); 5362 sp<AMessage> notify = mCodec->mNotify->dup(); 5363 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5364 notify->post(); 5365 break; 5366 } 5367 5368 default: 5369 return false; 5370 } 5371 5372 return true; 5373} 5374 5375bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5376 // there is a possibility that this is an outstanding message for a 5377 // codec that we have already destroyed 5378 if (mCodec->mOMXNode == NULL) { 5379 ALOGI("ignoring message as already freed component: %s", 5380 msg->debugString().c_str()); 5381 return false; 5382 } 5383 5384 int32_t generation; 5385 CHECK(msg->findInt32("generation", (int32_t*)&generation)); 5386 if (generation != mCodec->mNodeGeneration) { 5387 ALOGW("Unexpected message for component: %s, gen %u, cur %u", 5388 msg->debugString().c_str(), generation, mCodec->mNodeGeneration); 5389 return false; 5390 } 5391 return true; 5392} 5393 5394bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5395 sp<RefBase> obj; 5396 CHECK(msg->findObject("messages", &obj)); 5397 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5398 5399 bool receivedRenderedEvents = false; 5400 for (std::list<sp<AMessage>>::const_iterator it = 
msgList->getList().cbegin(); 5401 it != msgList->getList().cend(); ++it) { 5402 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 5403 mCodec->handleMessage(*it); 5404 int32_t type; 5405 CHECK((*it)->findInt32("type", &type)); 5406 if (type == omx_message::FRAME_RENDERED) { 5407 receivedRenderedEvents = true; 5408 } 5409 } 5410 5411 if (receivedRenderedEvents) { 5412 // NOTE: all buffers are rendered in this case 5413 mCodec->notifyOfRenderedFrames(); 5414 } 5415 return true; 5416} 5417 5418bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5419 int32_t type; 5420 CHECK(msg->findInt32("type", &type)); 5421 5422 switch (type) { 5423 case omx_message::EVENT: 5424 { 5425 int32_t event, data1, data2; 5426 CHECK(msg->findInt32("event", &event)); 5427 CHECK(msg->findInt32("data1", &data1)); 5428 CHECK(msg->findInt32("data2", &data2)); 5429 5430 if (event == OMX_EventCmdComplete 5431 && data1 == OMX_CommandFlush 5432 && data2 == (int32_t)OMX_ALL) { 5433 // Use of this notification is not consistent across 5434 // implementations. We'll drop this notification and rely 5435 // on flush-complete notifications on the individual port 5436 // indices instead. 
5437 5438 return true; 5439 } 5440 5441 return onOMXEvent( 5442 static_cast<OMX_EVENTTYPE>(event), 5443 static_cast<OMX_U32>(data1), 5444 static_cast<OMX_U32>(data2)); 5445 } 5446 5447 case omx_message::EMPTY_BUFFER_DONE: 5448 { 5449 IOMX::buffer_id bufferID; 5450 int32_t fenceFd; 5451 5452 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5453 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5454 5455 return onOMXEmptyBufferDone(bufferID, fenceFd); 5456 } 5457 5458 case omx_message::FILL_BUFFER_DONE: 5459 { 5460 IOMX::buffer_id bufferID; 5461 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5462 5463 int32_t rangeOffset, rangeLength, flags, fenceFd; 5464 int64_t timeUs; 5465 5466 CHECK(msg->findInt32("range_offset", &rangeOffset)); 5467 CHECK(msg->findInt32("range_length", &rangeLength)); 5468 CHECK(msg->findInt32("flags", &flags)); 5469 CHECK(msg->findInt64("timestamp", &timeUs)); 5470 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5471 5472 return onOMXFillBufferDone( 5473 bufferID, 5474 (size_t)rangeOffset, (size_t)rangeLength, 5475 (OMX_U32)flags, 5476 timeUs, 5477 fenceFd); 5478 } 5479 5480 case omx_message::FRAME_RENDERED: 5481 { 5482 int64_t mediaTimeUs, systemNano; 5483 5484 CHECK(msg->findInt64("media_time_us", &mediaTimeUs)); 5485 CHECK(msg->findInt64("system_nano", &systemNano)); 5486 5487 return onOMXFrameRendered( 5488 mediaTimeUs, systemNano); 5489 } 5490 5491 default: 5492 ALOGE("Unexpected message type: %d", type); 5493 return false; 5494 } 5495} 5496 5497bool ACodec::BaseState::onOMXFrameRendered( 5498 int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) { 5499 // ignore outside of Executing and PortSettingsChanged states 5500 return true; 5501} 5502 5503bool ACodec::BaseState::onOMXEvent( 5504 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5505 if (event == OMX_EventDataSpaceChanged) { 5506 ColorAspects aspects = ColorUtils::unpackToColorAspects(data2); 5507 5508 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects); 5509 
return true; 5510 } 5511 5512 if (event != OMX_EventError) { 5513 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)", 5514 mCodec->mComponentName.c_str(), event, data1, data2); 5515 5516 return false; 5517 } 5518 5519 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1); 5520 5521 // verify OMX component sends back an error we expect. 5522 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 5523 if (!isOMXError(omxError)) { 5524 ALOGW("Invalid OMX error %#x", omxError); 5525 omxError = OMX_ErrorUndefined; 5526 } 5527 mCodec->signalError(omxError); 5528 5529 return true; 5530} 5531 5532bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { 5533 ALOGV("[%s] onOMXEmptyBufferDone %u", 5534 mCodec->mComponentName.c_str(), bufferID); 5535 5536 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5537 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5538 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5539 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5540 mCodec->dumpBuffers(kPortIndexInput); 5541 if (fenceFd >= 0) { 5542 ::close(fenceFd); 5543 } 5544 return false; 5545 } 5546 info->mStatus = BufferInfo::OWNED_BY_US; 5547 5548 // input buffers cannot take fences, so wait for any fence now 5549 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); 5550 fenceFd = -1; 5551 5552 // still save fence for completeness 5553 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); 5554 5555 // We're in "store-metadata-in-buffers" mode, the underlying 5556 // OMX component had access to data that's implicitly refcounted 5557 // by this "MediaBuffer" object. Now that the OMX component has 5558 // told us that it's done with the input buffer, we can decrement 5559 // the mediaBuffer's reference count. 
5560 info->mData->setMediaBufferBase(NULL); 5561 5562 PortMode mode = getPortMode(kPortIndexInput); 5563 5564 switch (mode) { 5565 case KEEP_BUFFERS: 5566 break; 5567 5568 case RESUBMIT_BUFFERS: 5569 postFillThisBuffer(info); 5570 break; 5571 5572 case FREE_BUFFERS: 5573 default: 5574 ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers"); 5575 return false; 5576 } 5577 5578 return true; 5579} 5580 5581void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 5582 if (mCodec->mPortEOS[kPortIndexInput]) { 5583 return; 5584 } 5585 5586 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 5587 5588 sp<AMessage> notify = mCodec->mNotify->dup(); 5589 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 5590 notify->setInt32("buffer-id", info->mBufferID); 5591 5592 info->mData->meta()->clear(); 5593 notify->setObject("buffer", info->mData); 5594 5595 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); 5596 reply->setInt32("buffer-id", info->mBufferID); 5597 5598 notify->setMessage("reply", reply); 5599 5600 notify->post(); 5601 5602 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 5603} 5604 5605void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 5606 IOMX::buffer_id bufferID; 5607 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5608 sp<MediaCodecBuffer> buffer; 5609 int32_t err = OK; 5610 bool eos = false; 5611 PortMode mode = getPortMode(kPortIndexInput); 5612 5613 sp<RefBase> obj; 5614 if (!msg->findObject("buffer", &obj)) { 5615 /* these are unfilled buffers returned by client */ 5616 CHECK(msg->findInt32("err", &err)); 5617 5618 if (err == OK) { 5619 /* buffers with no errors are returned on MediaCodec.flush */ 5620 mode = KEEP_BUFFERS; 5621 } else { 5622 ALOGV("[%s] saw error %d instead of an input buffer", 5623 mCodec->mComponentName.c_str(), err); 5624 eos = true; 5625 } 5626 5627 buffer.clear(); 5628 } else { 5629 buffer = static_cast<MediaCodecBuffer *>(obj.get()); 5630 } 5631 5632 int32_t tmp; 
5633 if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { 5634 eos = true; 5635 err = ERROR_END_OF_STREAM; 5636 } 5637 5638 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5639 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5640 if (status != BufferInfo::OWNED_BY_UPSTREAM) { 5641 ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID); 5642 mCodec->dumpBuffers(kPortIndexInput); 5643 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5644 return; 5645 } 5646 5647 info->mStatus = BufferInfo::OWNED_BY_US; 5648 5649 switch (mode) { 5650 case KEEP_BUFFERS: 5651 { 5652 if (eos) { 5653 if (!mCodec->mPortEOS[kPortIndexInput]) { 5654 mCodec->mPortEOS[kPortIndexInput] = true; 5655 mCodec->mInputEOSResult = err; 5656 } 5657 } 5658 break; 5659 } 5660 5661 case RESUBMIT_BUFFERS: 5662 { 5663 if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) { 5664 // Do not send empty input buffer w/o EOS to the component. 5665 if (buffer->size() == 0 && !eos) { 5666 postFillThisBuffer(info); 5667 break; 5668 } 5669 5670 int64_t timeUs; 5671 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 5672 5673 OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; 5674 5675 MetadataBufferType metaType = mCodec->mInputMetadataType; 5676 int32_t isCSD = 0; 5677 if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) { 5678 if (mCodec->mIsLegacyVP9Decoder) { 5679 ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data", 5680 mCodec->mComponentName.c_str(), bufferID); 5681 postFillThisBuffer(info); 5682 break; 5683 } 5684 flags |= OMX_BUFFERFLAG_CODECCONFIG; 5685 metaType = kMetadataBufferTypeInvalid; 5686 } 5687 5688 if (eos) { 5689 flags |= OMX_BUFFERFLAG_EOS; 5690 } 5691 5692 if (buffer != info->mCodecData) { 5693 ALOGV("[%s] Needs to copy input data for buffer %u. 
(%p != %p)", 5694 mCodec->mComponentName.c_str(), 5695 bufferID, 5696 buffer.get(), info->mCodecData.get()); 5697 5698 sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput]; 5699 if (converter == NULL || isCSD) { 5700 converter = getCopyConverter(); 5701 } 5702 status_t err = converter->convert(buffer, info->mCodecData); 5703 if (err != OK) { 5704 mCodec->signalError(OMX_ErrorUndefined, err); 5705 return; 5706 } 5707 } 5708 5709 if (flags & OMX_BUFFERFLAG_CODECCONFIG) { 5710 ALOGV("[%s] calling emptyBuffer %u w/ codec specific data", 5711 mCodec->mComponentName.c_str(), bufferID); 5712 } else if (flags & OMX_BUFFERFLAG_EOS) { 5713 ALOGV("[%s] calling emptyBuffer %u w/ EOS", 5714 mCodec->mComponentName.c_str(), bufferID); 5715 } else { 5716#if TRACK_BUFFER_TIMING 5717 ALOGI("[%s] calling emptyBuffer %u w/ time %lld us", 5718 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5719#else 5720 ALOGV("[%s] calling emptyBuffer %u w/ time %lld us", 5721 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5722#endif 5723 } 5724 5725#if TRACK_BUFFER_TIMING 5726 ACodec::BufferStats stats; 5727 stats.mEmptyBufferTimeUs = ALooper::GetNowUs(); 5728 stats.mFillBufferDoneTimeUs = -1ll; 5729 mCodec->mBufferStats.add(timeUs, stats); 5730#endif 5731 5732 if (mCodec->storingMetadataInDecodedBuffers()) { 5733 // try to submit an output buffer for each input buffer 5734 PortMode outputMode = getPortMode(kPortIndexOutput); 5735 5736 ALOGV("MetadataBuffersToSubmit=%u portMode=%s", 5737 mCodec->mMetadataBuffersToSubmit, 5738 (outputMode == FREE_BUFFERS ? "FREE" : 5739 outputMode == KEEP_BUFFERS ? 
"KEEP" : "RESUBMIT")); 5740 if (outputMode == RESUBMIT_BUFFERS) { 5741 mCodec->submitOutputMetadataBuffer(); 5742 } 5743 } 5744 info->checkReadFence("onInputBufferFilled"); 5745 5746 status_t err2 = OK; 5747 switch (metaType) { 5748 case kMetadataBufferTypeInvalid: 5749 break; 5750#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 5751 case kMetadataBufferTypeNativeHandleSource: 5752 if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) { 5753 VideoNativeHandleMetadata *vnhmd = 5754 (VideoNativeHandleMetadata*)info->mCodecData->base(); 5755 err2 = mCodec->mOMXNode->updateNativeHandleInMeta( 5756 mCodec->kPortIndexInput, 5757 NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */), 5758 bufferID); 5759 } 5760 break; 5761 case kMetadataBufferTypeANWBuffer: 5762 if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) { 5763 VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base(); 5764 err2 = mCodec->mOMXNode->updateGraphicBufferInMeta( 5765 mCodec->kPortIndexInput, 5766 new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */), 5767 bufferID); 5768 } 5769 break; 5770#endif 5771 default: 5772 ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode", 5773 asString(metaType), info->mCodecData->size(), 5774 sizeof(buffer_handle_t) * 8); 5775 err2 = ERROR_UNSUPPORTED; 5776 break; 5777 } 5778 5779 if (err2 == OK) { 5780 err2 = mCodec->mOMXNode->emptyBuffer( 5781 bufferID, 5782 0, 5783 info->mCodecData->size(), 5784 flags, 5785 timeUs, 5786 info->mFenceFd); 5787 } 5788 info->mFenceFd = -1; 5789 if (err2 != OK) { 5790 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5791 return; 5792 } 5793 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5794 5795 if (!eos && err == OK) { 5796 getMoreInputDataIfPossible(); 5797 } else { 5798 ALOGV("[%s] Signalled EOS (%d) on the input port", 5799 mCodec->mComponentName.c_str(), err); 5800 5801 mCodec->mPortEOS[kPortIndexInput] = true; 5802 
mCodec->mInputEOSResult = err; 5803 } 5804 } else if (!mCodec->mPortEOS[kPortIndexInput]) { 5805 if (err != OK && err != ERROR_END_OF_STREAM) { 5806 ALOGV("[%s] Signalling EOS on the input port due to error %d", 5807 mCodec->mComponentName.c_str(), err); 5808 } else { 5809 ALOGV("[%s] Signalling EOS on the input port", 5810 mCodec->mComponentName.c_str()); 5811 } 5812 5813 ALOGV("[%s] calling emptyBuffer %u signalling EOS", 5814 mCodec->mComponentName.c_str(), bufferID); 5815 5816 info->checkReadFence("onInputBufferFilled"); 5817 status_t err2 = mCodec->mOMXNode->emptyBuffer( 5818 bufferID, 5819 0, 5820 0, 5821 OMX_BUFFERFLAG_EOS, 5822 0, 5823 info->mFenceFd); 5824 info->mFenceFd = -1; 5825 if (err2 != OK) { 5826 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5827 return; 5828 } 5829 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5830 5831 mCodec->mPortEOS[kPortIndexInput] = true; 5832 mCodec->mInputEOSResult = err; 5833 } 5834 break; 5835 } 5836 5837 case FREE_BUFFERS: 5838 break; 5839 5840 default: 5841 ALOGE("invalid port mode: %d", mode); 5842 break; 5843 } 5844} 5845 5846void ACodec::BaseState::getMoreInputDataIfPossible() { 5847 if (mCodec->mPortEOS[kPortIndexInput]) { 5848 return; 5849 } 5850 5851 BufferInfo *eligible = NULL; 5852 5853 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 5854 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 5855 5856#if 0 5857 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { 5858 // There's already a "read" pending. 
5859 return; 5860 } 5861#endif 5862 5863 if (info->mStatus == BufferInfo::OWNED_BY_US) { 5864 eligible = info; 5865 } 5866 } 5867 5868 if (eligible == NULL) { 5869 return; 5870 } 5871 5872 postFillThisBuffer(eligible); 5873} 5874 5875bool ACodec::BaseState::onOMXFillBufferDone( 5876 IOMX::buffer_id bufferID, 5877 size_t rangeOffset, size_t rangeLength, 5878 OMX_U32 flags, 5879 int64_t timeUs, 5880 int fenceFd) { 5881 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x", 5882 mCodec->mComponentName.c_str(), bufferID, timeUs, flags); 5883 5884 ssize_t index; 5885 status_t err= OK; 5886 5887#if TRACK_BUFFER_TIMING 5888 index = mCodec->mBufferStats.indexOfKey(timeUs); 5889 if (index >= 0) { 5890 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index); 5891 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs(); 5892 5893 ALOGI("frame PTS %lld: %lld", 5894 timeUs, 5895 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs); 5896 5897 mCodec->mBufferStats.removeItemsAt(index); 5898 stats = NULL; 5899 } 5900#endif 5901 5902 BufferInfo *info = 5903 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5904 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5905 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5906 ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5907 mCodec->dumpBuffers(kPortIndexOutput); 5908 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5909 if (fenceFd >= 0) { 5910 ::close(fenceFd); 5911 } 5912 return true; 5913 } 5914 5915 info->mDequeuedAt = ++mCodec->mDequeueCounter; 5916 info->mStatus = BufferInfo::OWNED_BY_US; 5917 5918 if (info->mRenderInfo != NULL) { 5919 // The fence for an emptied buffer must have signaled, but there still could be queued 5920 // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these, 5921 // as we will soon requeue this buffer to the surface. 
While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    // Hand the (possibly -1) fence to the BufferInfo as a read fence.
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // An empty buffer that is not a (first) EOS carries no data for the
            // client; return it straight to the component instead of draining it.
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                     mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMXNode->fillBuffer(info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            // "reply" is posted back to us as kWhatOutputBufferDrained once the
            // client is done with (or renders) this buffer.
            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);
            sp<MediaCodecBuffer> buffer = info->mData;

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) {
                // If potentially rendering onto a surface, always save key format data (crop &
                // data space) so that we can set it if and once the buffer is rendered.
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                // Encoder output in metadata mode: extract the native handle
                // (when valid for this process) and record the data range in meta.
                native_handle_t *handle = NULL;
                VideoNativeHandleMetadata &nativeMeta =
                    *(VideoNativeHandleMetadata *)buffer->data();
                if (buffer->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // handle is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pHandle;
#endif
                }
                buffer->meta()->setPointer("handle", handle);
                buffer->meta()->setInt32("rangeOffset", rangeOffset);
                buffer->meta()->setInt32("rangeLength", rangeLength);
            } else if (buffer == info->mCodecData) {
                // No conversion needed; just narrow the client buffer to the
                // valid range reported by the component.
                buffer->setRange(rangeOffset, rangeLength);
            } else {
                info->mCodecData->setRange(rangeOffset, rangeLength);
                // in this case we know that mConverter is not null
                status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                        info->mCodecData, buffer);
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                mCodec->mSkipCutBuffer->submit(buffer);
            }
            buffer->meta()->setInt64("timeUs", timeUs);

            // Notify MediaCodec that this buffer is ready to be drained; the
            // "reply" message routes the buffer back to onOutputBufferDrained.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setObject("buffer", buffer);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                // NOTE: intentionally shadows the outer "notify"; this one
                // reports EOS (with the input-side result) to MediaCodec.
                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        case FREE_BUFFERS:
            err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return true;
            }
            break;

        default:
            ALOGE("Invalid port mode: %d", mode);
            return false;
    }

    return true;
}

// Handles kWhatOutputBufferDrained: the client has finished with an output
// buffer. Optionally applies deferred crop/dataspace, renders the buffer to
// the native window if requested, then recycles it per the current port mode.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<RefBase> obj;
    sp<MediaCodecBuffer> buffer = nullptr;
    if (msg->findObject("buffer", &obj)) {
        buffer = static_cast<MediaCodecBuffer *>(obj.get());
    }
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        // Ownership mismatch indicates a protocol error with the client.
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    // Apply crop saved in the reply message, but only if it changed since the
    // last value pushed to the native window.
    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
            && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
        mCodec->mLastNativeWindowCrop = crop;
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    int32_t dataSpace;
    if (msg->findInt32("dataspace", &dataSpace)
            && dataSpace != mCodec->mLastNativeWindowDataSpace) {
        status_t err = native_window_set_buffers_data_space(
                mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
        mCodec->mLastNativeWindowDataSpace = dataSpace;
        ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
    }

    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0
            && buffer != NULL && buffer->size() != 0) {
        ATRACE_NAME("render");
        // The client wants this buffer to be rendered.

        // save buffers sent to the surface so we can get render time when they return
        int64_t mediaTimeUs = -1;
        buffer->meta()->findInt64("timeUs", &mediaTimeUs);
        if (mediaTimeUs >= 0) {
            mCodec->mRenderTracker.onFrameQueued(
                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
        }

        int64_t timestampNs = 0;
        if (!msg->findInt64("timestampNs", &timestampNs)) {
            // use media timestamp if client did not request a specific render timestamp
            if (buffer->meta()->findInt64("timeUs", &timestampNs)) {
                ALOGV("using buffer PTS of %lld", (long long)timestampNs);
                timestampNs *= 1000;
            }
        }

        status_t err;
        err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
        ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);

        // queueBuffer consumes the fence fd; clear ours regardless of outcome.
        info->checkReadFence("onOutputBufferDrained before queueBuffer");
        err = mCodec->mNativeWindow->queueBuffer(
                    mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
        info->mFenceFd = -1;
        if (err == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            info->mStatus = BufferInfo::OWNED_BY_US;
            // keeping read fence as write fence to avoid clobbering
            info->mIsReadFence = false;
        }
    } else {
        if (mCodec->mNativeWindow != NULL &&
            (buffer == NULL || buffer->size() != 0)) {
            // move read fence into write fence to avoid clobbering
            info->mIsReadFence = false;
            ATRACE_NAME("frame-drop");
        }
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                // info may be NULL if no spare buffer could be dequeued.
                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                         mCodec->mComponentName.c_str(), info->mBufferID);
                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                    status_t err = mCodec->mOMXNode->fillBuffer(
                            info->mBufferID, info->mFenceFd);
                    info->mFenceFd = -1;
                    if (err == OK) {
                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                    } else {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                }
            }
            break;
        }

        case FREE_BUFFERS:
        {
            status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            }
            break;
        }

        default:
            ALOGE("Invalid port mode: %d", mode);
            return;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Uninitialized: drop the death-notifier link to the (possibly gone)
// OMX node and reset all per-component state on the ACodec.
void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    if (mDeathNotifier != NULL) {
        if (mCodec->mOMXNode != NULL) {
            sp<IBinder> binder = IInterface::asBinder(mCodec->mOMXNode);
            binder->unlinkToDeath(mDeathNotifier);
        }
        mDeathNotifier.clear();
    }

    mCodec->mUsingNativeWindow = false;
    mCodec->mNativeWindow.clear();
    mCodec->mNativeWindowUsageBits = 0;
    mCodec->mOMX.clear();
    mCodec->mOMXNode.clear();
    mCodec->mFlags = 0;
    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mConverter[0].clear();
    mCodec->mConverter[1].clear();
    mCodec->mComponentName.clear();
}

// Message dispatch while no component is allocated. Shutdown and flush are
// trivially "completed" here; setup/allocate drive the transition to Loaded.
bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }

        case ACodec::kWhatAllocateComponent:
        {
            onAllocateComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));
            ALOGW_IF(keepComponentAllocated,
                     "cannot keep component allocated on shutdown in Uninitialized state");

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // Nothing allocated, so a flush is immediately complete.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatReleaseCodecInstance:
        {
            // nothing to do, as we have already signaled shutdown
            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// One-shot setup: allocate, configure and start the component in sequence,
// aborting at the first failing step.
void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    if (onAllocateComponent(msg)
            && mCodec->mLoadedState->onConfigureComponent(msg)) {
        mCodec->mLoadedState->onStart();
    }
}

// Allocates an OMX node for either an explicitly named component or the best
// match for the requested mime/encoder. On success links a death notifier,
// publishes kWhatComponentAllocated and moves to the Loaded state.
// Returns false (after signalling an error) when no component can be created.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mOMXNode == NULL);

    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    sp<IOMX> omx = client.interface();

    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

    Vector<AString> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        // Caller requested a specific component by name.
        sp<IMediaCodecList> list = MediaCodecList::getInstance();
        if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
            matchingCodecs.add(componentName);
        }
    } else {
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        MediaCodecList::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                0,       // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    sp<IOMXNode> omxNode;

    status_t err = NAME_NOT_FOUND;
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs[matchIndex];
        quirks = MediaCodecList::getQuirksFor(componentName.c_str());

        // Temporarily boost thread priority while allocating the node, then
        // restore whatever priority we had before.
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        err = omx->allocateNode(componentName.c_str(), observer, &omxNode);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        omxNode = NULL;
    }

    if (omxNode == NULL) {
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                    encoder ? "en" : "de", mime.c_str(), err);
        } else {
            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
        }

        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
        return false;
    }

    mDeathNotifier = new DeathNotifier(notify);
    if (IInterface::asBinder(omxNode)->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    // Route OMX callbacks through a generation-tagged message so stale
    // callbacks from a previous node are ignored.
    notify = new AMessage(kWhatOMXMessageList, mCodec);
    notify->setInt32("generation", ++mCodec->mNodeGeneration);
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mRenderTracker.setComponentName(componentName);
    mCodec->mFlags = 0;

    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    omxNode->setQuirks(quirks);
    mCodec->mOMX = omx;
    mCodec->mOMXNode = omxNode;

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedState::LoadedState(ACodec *codec)
    :
BaseState(codec) {
}

// Entering Loaded: reset per-session state (EOS flags, formats, buffer-source)
// and finish any shutdown that was deferred while buffers were in flight.
void ACodec::LoadedState::stateEntered() {
    ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetadataBuffersToSubmit = 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();
    mCodec->mGraphicBufferSource.clear();

    if (mCodec->mShutdownInProgress) {
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;

    mCodec->processDeferredMessages();
}

// Frees the OMX node (unless the component is to be kept allocated) and, for
// an explicit shutdown request, reports completion to the client.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        (void)mCodec->mOMXNode->freeNode();

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

// Message dispatch in the Loaded state: configuration, input-surface setup,
// start, shutdown and (trivially complete) flush.
bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatSetInputSurface:
        {
            onSetInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // Not started yet, so a flush completes immediately.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Configures the allocated component for the given mime type. On success
// publishes the negotiated input/output formats; on failure signals an error
// and returns false.
bool ACodec::LoadedState::onConfigureComponent(
        const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mOMXNode != NULL);

    status_t err = OK;
    AString mime;
    if (!msg->findString("mime", &mime)) {
        err = BAD_VALUE;
    } else {
        err = mCodec->configureCodec(mime.c_str(), msg);
    }
    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d",
              mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentConfigured);
        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
        notify->post();
    }

    return true;
}

// Applies all pending encoder-surface options (dataspace, frame-repeat,
// max pts gap/fps, time lapse, suspend, color aspects) to the graphic buffer
// source. Returns the first error encountered, or OK.
status_t ACodec::LoadedState::setupInputSurface() {
    if (mCodec->mGraphicBufferSource == NULL) {
        return BAD_VALUE;
    }

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    if (err != OK) {
        ALOGE("Failed to get default data space");
        return err;
    }

    err = statusFromBinderStatus(
            mCodec->mGraphicBufferSource->configure(mCodec->mOMXNode, dataSpace));
    if (err != OK) {
        ALOGE("[%s] Unable to configure for node (err %d)",
              mCodec->mComponentName.c_str(), err);
        return err;
    }

    if (mCodec->mRepeatFrameDelayUs > 0ll) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setRepeatPreviousFrameDelayUs(
                        mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mMaxPtsGapUs > 0ll) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setMaxTimestampGapUs(
                    mCodec->mMaxPtsGapUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setMaxFps(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setTimeLapseConfig(
                        mCodec->mTimePerFrameUs, mCodec->mTimePerCaptureUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setSuspend(true));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    // Expose whether the consumer reads buffers with software often, so the
    // producer side can pick an appropriate format.
    uint32_t usageBits;
    if (mCodec->mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    sp<ABuffer> colorAspectsBuffer;
    if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        if (colorAspectsBuffer->size() != sizeof(ColorAspects)) {
            return INVALID_OPERATION;
        }

        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setColorAspects(ColorUtils::packToU32(
                        *(ColorAspects *)colorAspectsBuffer->base())));

        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}

// Creates a new encoder input surface, configures it, and replies with
// kWhatInputSurfaceCreated carrying either the producer or an error code.
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    sp<IGraphicBufferProducer> bufferProducer;
    status_t err = mCodec->mOMX->createInputSurface(
            &bufferProducer, &mCodec->mGraphicBufferSource);

    if (err == OK) {
        err = setupInputSurface();
    }

    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;

        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);

        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Adopts an existing persistent input surface as this encoder's buffer source
// and replies with kWhatInputSurfaceAccepted (or an error code on failure).
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());
    mCodec->mGraphicBufferSource = surface->getBufferSource();

    status_t err = setupInputSurface();

    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;

        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Requests the OMX Loaded->Idle state transition; buffer allocation happens
// when LoadedToIdleState is entered.
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMXNode->sendCommand(OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// Entering Loaded->Idle: allocate buffers on both ports. On failure, roll
// back to the Loaded OMX state, free whatever was allocated, and return to
// the Loaded ACodec state.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
              "(error 0x%08x)",
              err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->mOMXNode->sendCommand(
                OMX_CommandStateSet, OMX_StateLoaded);
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates buffers on the input port, then the output port; returns the
// first failure.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

// While transitioning to Idle: defer shutdown/setParameters until the
// transition completes; flush is trivially complete since nothing runs yet.
bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Expects the Idle command-complete event, then requests Executing and moves
// to IdleToExecutingState. Any other command completion is a protocol error.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMXNode->sendCommand(
                        OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

// While transitioning to Executing: same deferral policy as Loaded->Idle.
bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Expects the Executing command-complete event, then resumes buffer flow and
// enters the Executing state.
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    return RESUBMIT_BUFFERS;
}

void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Submits every output buffer we own back to the component via fillBuffer.
// Buffers held by the native window are skipped; any unexpected ownership or
// fillBuffer failure aborts the loop and signals FAILED_TRANSACTION.
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        status_t err = mCodec->mOMXNode->fillBuffer(info->mBufferID, info->mFenceFd);
        info->mFenceFd = -1;
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

// (Re)starts buffer flow: submits output buffers to the component and offers
// all input buffers we own to the client. No-op when already active.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }

    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

// Message dispatch while Executing: shutdown/flush start OMX state/flush
// commands and move to the corresponding transitional state; other messages
// adjust runtime behavior in place.
bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMXNode->sendCommand(
                    OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            status_t err = mCodec->mOMXNode->sendCommand(OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Report the result back if the caller asked for a reply.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    // Keep re-arming the workaround until output EOS arrives.
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime parameters ("video-bitrate", "time-offset-us",
// "skip-frames-before", "drop-input-frames", "request-sync", "operating-rate",
// "intra-refresh-period", ...) to the component and/or the graphic buffer
// source. Returns the first error encountered; surface-only keys fail with
// INVALID_OPERATION when no input surface is configured.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMXNode->setConfig(
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t timeOffsetUs;
    if (params->findInt64("time-offset-us", &timeOffsetUs)) {
        if (mGraphicBufferSource == NULL) {
            ALOGE("[%s] Invalid to set input buffer time offset without surface",
                    mComponentName.c_str());
            return INVALID_OPERATION;
        }

        status_t err = statusFromBinderStatus(
                mGraphicBufferSource->setTimeOffsetUs(timeOffsetUs));

        if (err != OK) {
            ALOGE("[%s] Unable to set input buffer time offset (err %d)",
                mComponentName.c_str(),
                err);
            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        if (mGraphicBufferSource == NULL) {
            ALOGE("[%s] Invalid to set start time without surface",
                    mComponentName.c_str());
            return INVALID_OPERATION;
        }

        status_t err = statusFromBinderStatus(
                mGraphicBufferSource->setStartTimeUs(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        if (mGraphicBufferSource == NULL) {
            ALOGE("[%s] Invalid to set suspend without surface",
                    mComponentName.c_str());
            return INVALID_OPERATION;
        }

        status_t err = statusFromBinderStatus(
                mGraphicBufferSource->setSuspend(dropInputFrames != 0));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err = setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod.
Failure is fine since this key is optional", 7244 mComponentName.c_str()); 7245 err = OK; 7246 } 7247 } 7248 7249 status_t err = configureTemporalLayers(params, false /* inConfigure */, mOutputFormat); 7250 if (err != OK) { 7251 err = OK; // ignore failure 7252 } 7253 7254 return err; 7255} 7256 7257void ACodec::onSignalEndOfInputStream() { 7258 sp<AMessage> notify = mNotify->dup(); 7259 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); 7260 7261 status_t err = INVALID_OPERATION; 7262 if (mGraphicBufferSource != NULL) { 7263 err = statusFromBinderStatus(mGraphicBufferSource->signalEndOfInputStream()); 7264 } 7265 if (err != OK) { 7266 notify->setInt32("err", err); 7267 } 7268 notify->post(); 7269} 7270 7271bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 7272 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7273 return true; 7274} 7275 7276bool ACodec::ExecutingState::onOMXEvent( 7277 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7278 switch (event) { 7279 case OMX_EventPortSettingsChanged: 7280 { 7281 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 7282 7283 mCodec->onOutputFormatChanged(); 7284 7285 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { 7286 mCodec->mMetadataBuffersToSubmit = 0; 7287 CHECK_EQ(mCodec->mOMXNode->sendCommand( 7288 OMX_CommandPortDisable, kPortIndexOutput), 7289 (status_t)OK); 7290 7291 mCodec->freeOutputBuffersNotOwnedByComponent(); 7292 7293 mCodec->changeState(mCodec->mOutputPortSettingsChangedState); 7294 } else if (data2 != OMX_IndexConfigCommonOutputCrop 7295 && data2 != OMX_IndexConfigAndroidIntraRefresh) { 7296 ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x", 7297 mCodec->mComponentName.c_str(), data2); 7298 } 7299 7300 return true; 7301 } 7302 7303 case OMX_EventBufferFlag: 7304 { 7305 return true; 7306 } 7307 7308 default: 7309 return BaseState::onOMXEvent(event, data1, data2); 7310 } 7311} 7312 
////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// While reconfiguring the output port: free any output buffers handed back to
// us, but keep resubmitting input buffers so decoding can continue.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        // These requests cannot be serviced mid-reconfiguration; defer them
        // until we are back in ExecutingState.
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
         mCodec->mComponentName.c_str());
}

bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Drives the disable -> reallocate -> re-enable sequence for the output port.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                // All output buffers must have been returned before the port
                // disable completes; otherwise something is out of sync.
                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMXNode->sendCommand(
                            OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    // Allocate buffers with the new (post-reconfiguration)
                    // port definition.
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from executing->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                // Resume output submission only if we were actively running
                // before the reconfiguration.
                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

bool
ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    // Reset transition tracking; the idle completion event has not arrived yet.
    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            // The only command completion expected here is StateSet -> Idle.
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component is idle AND every buffer is back in our hands, free the
// buffers and request the transition to Loaded.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMXNode->sendCommand(
                OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            // Free both ports; report the first failure but attempt both.
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            if (err == OK) {
                err = err2;
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display. This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    // Each returned buffer may be the last one we were waiting for.
    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...
7573 7574 handled = true; 7575 break; 7576 } 7577 7578 case kWhatFlush: 7579 { 7580 // Don't send me a flush request if you previously wanted me 7581 // to shutdown. 7582 ALOGE("Got flush request in IdleToLoadedState"); 7583 break; 7584 } 7585 7586 default: 7587 handled = BaseState::onMessageReceived(msg); 7588 break; 7589 } 7590 7591 return handled; 7592} 7593 7594void ACodec::IdleToLoadedState::stateEntered() { 7595 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7596} 7597 7598bool ACodec::IdleToLoadedState::onOMXEvent( 7599 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7600 switch (event) { 7601 case OMX_EventCmdComplete: 7602 { 7603 if (data1 != (OMX_U32)OMX_CommandStateSet 7604 || data2 != (OMX_U32)OMX_StateLoaded) { 7605 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7606 asString((OMX_COMMANDTYPE)data1), data1, 7607 asString((OMX_STATETYPE)data2), data2); 7608 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7609 return true; 7610 } 7611 7612 mCodec->changeState(mCodec->mLoadedState); 7613 7614 return true; 7615 } 7616 7617 default: 7618 return BaseState::onOMXEvent(event, data1, data2); 7619 } 7620} 7621 7622//////////////////////////////////////////////////////////////////////////////// 7623 7624ACodec::FlushingState::FlushingState(ACodec *codec) 7625 : BaseState(codec) { 7626} 7627 7628void ACodec::FlushingState::stateEntered() { 7629 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7630 7631 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7632} 7633 7634bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7635 bool handled = false; 7636 7637 switch (msg->what()) { 7638 case kWhatShutdown: 7639 { 7640 mCodec->deferMessage(msg); 7641 break; 7642 } 7643 7644 case kWhatFlush: 7645 { 7646 // We're already doing this right now. 
7647 handled = true; 7648 break; 7649 } 7650 7651 default: 7652 handled = BaseState::onMessageReceived(msg); 7653 break; 7654 } 7655 7656 return handled; 7657} 7658 7659bool ACodec::FlushingState::onOMXEvent( 7660 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7661 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7662 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7663 7664 switch (event) { 7665 case OMX_EventCmdComplete: 7666 { 7667 if (data1 != (OMX_U32)OMX_CommandFlush) { 7668 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7669 asString((OMX_COMMANDTYPE)data1), data1, data2); 7670 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7671 return true; 7672 } 7673 7674 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7675 if (mFlushComplete[data2]) { 7676 ALOGW("Flush already completed for %s port", 7677 data2 == kPortIndexInput ? "input" : "output"); 7678 return true; 7679 } 7680 mFlushComplete[data2] = true; 7681 7682 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7683 changeStateIfWeOwnAllBuffers(); 7684 } 7685 } else if (data2 == OMX_ALL) { 7686 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7687 ALOGW("received flush complete event for OMX_ALL before ports have been" 7688 "flushed (%d/%d)", 7689 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7690 return false; 7691 } 7692 7693 changeStateIfWeOwnAllBuffers(); 7694 } else { 7695 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7696 } 7697 7698 return true; 7699 } 7700 7701 case OMX_EventPortSettingsChanged: 7702 { 7703 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7704 msg->setInt32("type", omx_message::EVENT); 7705 msg->setInt32("generation", mCodec->mNodeGeneration); 7706 msg->setInt32("event", event); 7707 msg->setInt32("data1", data1); 7708 msg->setInt32("data2", data2); 7709 7710 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7711 
mCodec->mComponentName.c_str()); 7712 7713 mCodec->deferMessage(msg); 7714 7715 return true; 7716 } 7717 7718 default: 7719 return BaseState::onOMXEvent(event, data1, data2); 7720 } 7721 7722 return true; 7723} 7724 7725void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7726 BaseState::onOutputBufferDrained(msg); 7727 7728 changeStateIfWeOwnAllBuffers(); 7729} 7730 7731void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7732 BaseState::onInputBufferFilled(msg); 7733 7734 changeStateIfWeOwnAllBuffers(); 7735} 7736 7737void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7738 if (mFlushComplete[kPortIndexInput] 7739 && mFlushComplete[kPortIndexOutput] 7740 && mCodec->allYourBuffersAreBelongToUs()) { 7741 // We now own all buffers except possibly those still queued with 7742 // the native window for rendering. Let's get those back as well. 7743 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7744 7745 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7746 7747 sp<AMessage> notify = mCodec->mNotify->dup(); 7748 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7749 notify->post(); 7750 7751 mCodec->mPortEOS[kPortIndexInput] = 7752 mCodec->mPortEOS[kPortIndexOutput] = false; 7753 7754 mCodec->mInputEOSResult = OK; 7755 7756 if (mCodec->mSkipCutBuffer != NULL) { 7757 mCodec->mSkipCutBuffer->clear(); 7758 } 7759 7760 mCodec->changeState(mCodec->mExecutingState); 7761 } 7762} 7763 7764status_t ACodec::queryCapabilities( 7765 const AString &name, const AString &mime, bool isEncoder, 7766 sp<MediaCodecInfo::Capabilities> *caps) { 7767 (*caps).clear(); 7768 const char *role = GetComponentRole(isEncoder, mime.c_str()); 7769 if (role == NULL) { 7770 return BAD_VALUE; 7771 } 7772 7773 OMXClient client; 7774 status_t err = client.connect(); 7775 if (err != OK) { 7776 return err; 7777 } 7778 7779 sp<IOMX> omx = client.interface(); 7780 sp<CodecObserver> observer = new CodecObserver; 7781 
    sp<IOMXNode> omxNode;

    err = omx->allocateNode(name.c_str(), observer, &omxNode);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = SetComponentRole(omxNode, role);
    if (err != OK) {
        // Clean up the temporary node before bailing out.
        omxNode->freeNode();
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        // Enumerate supported profile/level pairs (bounded by
        // kMaxIndicesToCheck in case the component never returns an error).
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omxNode->getParameter(
                    OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omxNode->getParameter(
                    OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            OMX_U32 flexibleEquivalent;
            if (IsFlexibleColorFormat(
                    omxNode, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                // Insert the flexible equivalent once, before the first
                // concrete format that maps to it.
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omxNode->getParameter(
                    (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        if (param.nProfileIndex == 0) {
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        // Probe for tunneled / adaptive playback support on decoders.
        native_handle_t *sidebandHandle = NULL;
        if (omxNode->configureVideoTunnelMode(
                kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omxNode->storeMetaDataInBuffers(
                kPortIndexOutput, OMX_TRUE) == OK ||
                omxNode->prepareForAdaptivePlayback(
                kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        // Probe for intra-refresh support on encoders.
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
        if (omxNode->getConfig(
                (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omxNode->freeNode();
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
7928//static 7929status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7930 switch (numChannels) { 7931 case 1: 7932 map[0] = OMX_AUDIO_ChannelCF; 7933 break; 7934 case 2: 7935 map[0] = OMX_AUDIO_ChannelLF; 7936 map[1] = OMX_AUDIO_ChannelRF; 7937 break; 7938 case 3: 7939 map[0] = OMX_AUDIO_ChannelLF; 7940 map[1] = OMX_AUDIO_ChannelRF; 7941 map[2] = OMX_AUDIO_ChannelCF; 7942 break; 7943 case 4: 7944 map[0] = OMX_AUDIO_ChannelLF; 7945 map[1] = OMX_AUDIO_ChannelRF; 7946 map[2] = OMX_AUDIO_ChannelLR; 7947 map[3] = OMX_AUDIO_ChannelRR; 7948 break; 7949 case 5: 7950 map[0] = OMX_AUDIO_ChannelLF; 7951 map[1] = OMX_AUDIO_ChannelRF; 7952 map[2] = OMX_AUDIO_ChannelCF; 7953 map[3] = OMX_AUDIO_ChannelLR; 7954 map[4] = OMX_AUDIO_ChannelRR; 7955 break; 7956 case 6: 7957 map[0] = OMX_AUDIO_ChannelLF; 7958 map[1] = OMX_AUDIO_ChannelRF; 7959 map[2] = OMX_AUDIO_ChannelCF; 7960 map[3] = OMX_AUDIO_ChannelLFE; 7961 map[4] = OMX_AUDIO_ChannelLR; 7962 map[5] = OMX_AUDIO_ChannelRR; 7963 break; 7964 case 7: 7965 map[0] = OMX_AUDIO_ChannelLF; 7966 map[1] = OMX_AUDIO_ChannelRF; 7967 map[2] = OMX_AUDIO_ChannelCF; 7968 map[3] = OMX_AUDIO_ChannelLFE; 7969 map[4] = OMX_AUDIO_ChannelLR; 7970 map[5] = OMX_AUDIO_ChannelRR; 7971 map[6] = OMX_AUDIO_ChannelCS; 7972 break; 7973 case 8: 7974 map[0] = OMX_AUDIO_ChannelLF; 7975 map[1] = OMX_AUDIO_ChannelRF; 7976 map[2] = OMX_AUDIO_ChannelCF; 7977 map[3] = OMX_AUDIO_ChannelLFE; 7978 map[4] = OMX_AUDIO_ChannelLR; 7979 map[5] = OMX_AUDIO_ChannelRR; 7980 map[6] = OMX_AUDIO_ChannelLS; 7981 map[7] = OMX_AUDIO_ChannelRS; 7982 break; 7983 default: 7984 return -EINVAL; 7985 } 7986 7987 return OK; 7988} 7989 7990} // namespace android 7991