ACodec.cpp revision addf2cbb120346ae42e78fa739245a353db5edad
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "ACodec" 19 20#ifdef __LP64__ 21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 22#endif 23 24#include <inttypes.h> 25#include <utils/Trace.h> 26 27#include <gui/Surface.h> 28 29#include <media/stagefright/ACodec.h> 30 31#include <binder/MemoryDealer.h> 32 33#include <media/stagefright/foundation/hexdump.h> 34#include <media/stagefright/foundation/ABuffer.h> 35#include <media/stagefright/foundation/ADebug.h> 36#include <media/stagefright/foundation/AMessage.h> 37#include <media/stagefright/foundation/AUtils.h> 38 39#include <media/stagefright/BufferProducerWrapper.h> 40#include <media/stagefright/MediaCodec.h> 41#include <media/stagefright/MediaCodecList.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/OMXClient.h> 44#include <media/stagefright/PersistentSurface.h> 45#include <media/stagefright/SurfaceUtils.h> 46#include <media/hardware/HardwareAPI.h> 47 48#include <OMX_AudioExt.h> 49#include <OMX_VideoExt.h> 50#include <OMX_Component.h> 51#include <OMX_IndexExt.h> 52#include <OMX_AsString.h> 53 54#include "include/avc_utils.h" 55#include "include/DataConverter.h" 56#include "include/SharedMemoryBuffer.h" 57#include "omx/OMXUtils.h" 58 59namespace android { 60 61using binder::Status; 62 63enum { 64 kMaxIndicesToCheck = 32, // used when enumerating supported 
formats and profiles 65}; 66 67// OMX errors are directly mapped into status_t range if 68// there is no corresponding MediaError status code. 69// Use the statusFromOMXError(int32_t omxError) function. 70// 71// Currently this is a direct map. 72// See frameworks/native/include/media/openmax/OMX_Core.h 73// 74// Vendor OMX errors from 0x90000000 - 0x9000FFFF 75// Extension OMX errors from 0x8F000000 - 0x90000000 76// Standard OMX errors from 0x80001000 - 0x80001024 (0x80001024 current) 77// 78 79// returns true if err is a recognized OMX error code. 80// as OMX error is OMX_S32, this is an int32_t type 81static inline bool isOMXError(int32_t err) { 82 return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX); 83} 84 85// converts an OMX error to a status_t 86static inline status_t statusFromOMXError(int32_t omxError) { 87 switch (omxError) { 88 case OMX_ErrorInvalidComponentName: 89 case OMX_ErrorComponentNotFound: 90 return NAME_NOT_FOUND; // can trigger illegal argument error for provided names. 91 default: 92 return isOMXError(omxError) ? omxError : 0; // no translation required 93 } 94} 95 96static inline status_t statusFromBinderStatus(const Status &status) { 97 if (status.isOk()) { 98 return OK; 99 } 100 status_t err; 101 if ((err = status.serviceSpecificErrorCode()) != OK) { 102 return err; 103 } 104 if ((err = status.transactionError()) != OK) { 105 return err; 106 } 107 // Other exception 108 return UNKNOWN_ERROR; 109} 110 111// checks and converts status_t to a non-side-effect status_t 112static inline status_t makeNoSideEffectStatus(status_t err) { 113 switch (err) { 114 // the following errors have side effects and may come 115 // from other code modules. Remap for safety reasons. 
116 case INVALID_OPERATION: 117 case DEAD_OBJECT: 118 return UNKNOWN_ERROR; 119 default: 120 return err; 121 } 122} 123 124struct MessageList : public RefBase { 125 MessageList() { 126 } 127 virtual ~MessageList() { 128 } 129 std::list<sp<AMessage> > &getList() { return mList; } 130private: 131 std::list<sp<AMessage> > mList; 132 133 DISALLOW_EVIL_CONSTRUCTORS(MessageList); 134}; 135 136static sp<DataConverter> getCopyConverter() { 137 static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited 138 static sp<DataConverter> sCopyConverter; // zero-inited 139 pthread_once(&once, [](){ sCopyConverter = new DataConverter(); }); 140 return sCopyConverter; 141} 142 143struct CodecObserver : public BnOMXObserver { 144 CodecObserver() {} 145 146 void setNotificationMessage(const sp<AMessage> &msg) { 147 mNotify = msg; 148 } 149 150 // from IOMXObserver 151 virtual void onMessages(const std::list<omx_message> &messages) { 152 if (messages.empty()) { 153 return; 154 } 155 156 sp<AMessage> notify = mNotify->dup(); 157 sp<MessageList> msgList = new MessageList(); 158 for (std::list<omx_message>::const_iterator it = messages.cbegin(); 159 it != messages.cend(); ++it) { 160 const omx_message &omx_msg = *it; 161 162 sp<AMessage> msg = new AMessage; 163 msg->setInt32("type", omx_msg.type); 164 switch (omx_msg.type) { 165 case omx_message::EVENT: 166 { 167 msg->setInt32("event", omx_msg.u.event_data.event); 168 msg->setInt32("data1", omx_msg.u.event_data.data1); 169 msg->setInt32("data2", omx_msg.u.event_data.data2); 170 break; 171 } 172 173 case omx_message::EMPTY_BUFFER_DONE: 174 { 175 msg->setInt32("buffer", omx_msg.u.buffer_data.buffer); 176 msg->setInt32("fence_fd", omx_msg.fenceFd); 177 break; 178 } 179 180 case omx_message::FILL_BUFFER_DONE: 181 { 182 msg->setInt32( 183 "buffer", omx_msg.u.extended_buffer_data.buffer); 184 msg->setInt32( 185 "range_offset", 186 omx_msg.u.extended_buffer_data.range_offset); 187 msg->setInt32( 188 "range_length", 189 
omx_msg.u.extended_buffer_data.range_length); 190 msg->setInt32( 191 "flags", 192 omx_msg.u.extended_buffer_data.flags); 193 msg->setInt64( 194 "timestamp", 195 omx_msg.u.extended_buffer_data.timestamp); 196 msg->setInt32( 197 "fence_fd", omx_msg.fenceFd); 198 break; 199 } 200 201 case omx_message::FRAME_RENDERED: 202 { 203 msg->setInt64( 204 "media_time_us", omx_msg.u.render_data.timestamp); 205 msg->setInt64( 206 "system_nano", omx_msg.u.render_data.nanoTime); 207 break; 208 } 209 210 default: 211 ALOGE("Unrecognized message type: %d", omx_msg.type); 212 break; 213 } 214 msgList->getList().push_back(msg); 215 } 216 notify->setObject("messages", msgList); 217 notify->post(); 218 } 219 220protected: 221 virtual ~CodecObserver() {} 222 223private: 224 sp<AMessage> mNotify; 225 226 DISALLOW_EVIL_CONSTRUCTORS(CodecObserver); 227}; 228 229//////////////////////////////////////////////////////////////////////////////// 230 231struct ACodec::BaseState : public AState { 232 explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL); 233 234protected: 235 enum PortMode { 236 KEEP_BUFFERS, 237 RESUBMIT_BUFFERS, 238 FREE_BUFFERS, 239 }; 240 241 ACodec *mCodec; 242 243 virtual PortMode getPortMode(OMX_U32 portIndex); 244 245 virtual bool onMessageReceived(const sp<AMessage> &msg); 246 247 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 248 249 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 250 virtual void onInputBufferFilled(const sp<AMessage> &msg); 251 252 void postFillThisBuffer(BufferInfo *info); 253 254private: 255 // Handles an OMX message. Returns true iff message was handled. 256 bool onOMXMessage(const sp<AMessage> &msg); 257 258 // Handles a list of messages. Returns true iff messages were handled. 
259 bool onOMXMessageList(const sp<AMessage> &msg); 260 261 // returns true iff this message is for this component and the component is alive 262 bool checkOMXMessage(const sp<AMessage> &msg); 263 264 bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd); 265 266 bool onOMXFillBufferDone( 267 IOMX::buffer_id bufferID, 268 size_t rangeOffset, size_t rangeLength, 269 OMX_U32 flags, 270 int64_t timeUs, 271 int fenceFd); 272 273 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 274 275 void getMoreInputDataIfPossible(); 276 277 DISALLOW_EVIL_CONSTRUCTORS(BaseState); 278}; 279 280//////////////////////////////////////////////////////////////////////////////// 281 282struct ACodec::DeathNotifier : public IBinder::DeathRecipient { 283 explicit DeathNotifier(const sp<AMessage> ¬ify) 284 : mNotify(notify) { 285 } 286 287 virtual void binderDied(const wp<IBinder> &) { 288 mNotify->post(); 289 } 290 291protected: 292 virtual ~DeathNotifier() {} 293 294private: 295 sp<AMessage> mNotify; 296 297 DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier); 298}; 299 300struct ACodec::UninitializedState : public ACodec::BaseState { 301 explicit UninitializedState(ACodec *codec); 302 303protected: 304 virtual bool onMessageReceived(const sp<AMessage> &msg); 305 virtual void stateEntered(); 306 307private: 308 void onSetup(const sp<AMessage> &msg); 309 bool onAllocateComponent(const sp<AMessage> &msg); 310 311 sp<DeathNotifier> mDeathNotifier; 312 313 DISALLOW_EVIL_CONSTRUCTORS(UninitializedState); 314}; 315 316//////////////////////////////////////////////////////////////////////////////// 317 318struct ACodec::LoadedState : public ACodec::BaseState { 319 explicit LoadedState(ACodec *codec); 320 321protected: 322 virtual bool onMessageReceived(const sp<AMessage> &msg); 323 virtual void stateEntered(); 324 325private: 326 friend struct ACodec::UninitializedState; 327 328 bool onConfigureComponent(const sp<AMessage> &msg); 329 void onCreateInputSurface(const 
sp<AMessage> &msg); 330 void onSetInputSurface(const sp<AMessage> &msg); 331 void onStart(); 332 void onShutdown(bool keepComponentAllocated); 333 334 status_t setupInputSurface(); 335 336 DISALLOW_EVIL_CONSTRUCTORS(LoadedState); 337}; 338 339//////////////////////////////////////////////////////////////////////////////// 340 341struct ACodec::LoadedToIdleState : public ACodec::BaseState { 342 explicit LoadedToIdleState(ACodec *codec); 343 344protected: 345 virtual bool onMessageReceived(const sp<AMessage> &msg); 346 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 347 virtual void stateEntered(); 348 349private: 350 status_t allocateBuffers(); 351 352 DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState); 353}; 354 355//////////////////////////////////////////////////////////////////////////////// 356 357struct ACodec::IdleToExecutingState : public ACodec::BaseState { 358 explicit IdleToExecutingState(ACodec *codec); 359 360protected: 361 virtual bool onMessageReceived(const sp<AMessage> &msg); 362 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 363 virtual void stateEntered(); 364 365private: 366 DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState); 367}; 368 369//////////////////////////////////////////////////////////////////////////////// 370 371struct ACodec::ExecutingState : public ACodec::BaseState { 372 explicit ExecutingState(ACodec *codec); 373 374 void submitRegularOutputBuffers(); 375 void submitOutputMetaBuffers(); 376 void submitOutputBuffers(); 377 378 // Submit output buffers to the decoder, submit input buffers to client 379 // to fill with data. 380 void resume(); 381 382 // Returns true iff input and output buffers are in play. 
383 bool active() const { return mActive; } 384 385protected: 386 virtual PortMode getPortMode(OMX_U32 portIndex); 387 virtual bool onMessageReceived(const sp<AMessage> &msg); 388 virtual void stateEntered(); 389 390 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 391 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 392 393private: 394 bool mActive; 395 396 DISALLOW_EVIL_CONSTRUCTORS(ExecutingState); 397}; 398 399//////////////////////////////////////////////////////////////////////////////// 400 401struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState { 402 explicit OutputPortSettingsChangedState(ACodec *codec); 403 404protected: 405 virtual PortMode getPortMode(OMX_U32 portIndex); 406 virtual bool onMessageReceived(const sp<AMessage> &msg); 407 virtual void stateEntered(); 408 409 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 410 virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano); 411 412private: 413 DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState); 414}; 415 416//////////////////////////////////////////////////////////////////////////////// 417 418struct ACodec::ExecutingToIdleState : public ACodec::BaseState { 419 explicit ExecutingToIdleState(ACodec *codec); 420 421protected: 422 virtual bool onMessageReceived(const sp<AMessage> &msg); 423 virtual void stateEntered(); 424 425 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 426 427 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 428 virtual void onInputBufferFilled(const sp<AMessage> &msg); 429 430private: 431 void changeStateIfWeOwnAllBuffers(); 432 433 bool mComponentNowIdle; 434 435 DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState); 436}; 437 438//////////////////////////////////////////////////////////////////////////////// 439 440struct ACodec::IdleToLoadedState : public ACodec::BaseState { 441 explicit IdleToLoadedState(ACodec 
*codec); 442 443protected: 444 virtual bool onMessageReceived(const sp<AMessage> &msg); 445 virtual void stateEntered(); 446 447 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 448 449private: 450 DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState); 451}; 452 453//////////////////////////////////////////////////////////////////////////////// 454 455struct ACodec::FlushingState : public ACodec::BaseState { 456 explicit FlushingState(ACodec *codec); 457 458protected: 459 virtual bool onMessageReceived(const sp<AMessage> &msg); 460 virtual void stateEntered(); 461 462 virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2); 463 464 virtual void onOutputBufferDrained(const sp<AMessage> &msg); 465 virtual void onInputBufferFilled(const sp<AMessage> &msg); 466 467private: 468 bool mFlushComplete[2]; 469 470 void changeStateIfWeOwnAllBuffers(); 471 472 DISALLOW_EVIL_CONSTRUCTORS(FlushingState); 473}; 474 475//////////////////////////////////////////////////////////////////////////////// 476 477void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) { 478 if (mFenceFd >= 0) { 479 ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s", 480 mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg); 481 } 482 mFenceFd = fenceFd; 483 mIsReadFence = false; 484} 485 486void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) { 487 if (mFenceFd >= 0) { 488 ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s", 489 mIsReadFence ? 
"read" : "write", mFenceFd, fenceFd, dbg); 490 } 491 mFenceFd = fenceFd; 492 mIsReadFence = true; 493} 494 495void ACodec::BufferInfo::checkWriteFence(const char *dbg) { 496 if (mFenceFd >= 0 && mIsReadFence) { 497 ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg); 498 } 499} 500 501void ACodec::BufferInfo::checkReadFence(const char *dbg) { 502 if (mFenceFd >= 0 && !mIsReadFence) { 503 ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg); 504 } 505} 506 507//////////////////////////////////////////////////////////////////////////////// 508 509ACodec::ACodec() 510 : mSampleRate(0), 511 mNodeGeneration(0), 512 mUsingNativeWindow(false), 513 mNativeWindowUsageBits(0), 514 mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN), 515 mIsVideo(false), 516 mIsEncoder(false), 517 mFatalError(false), 518 mShutdownInProgress(false), 519 mExplicitShutdown(false), 520 mIsLegacyVP9Decoder(false), 521 mEncoderDelay(0), 522 mEncoderPadding(0), 523 mRotationDegrees(0), 524 mChannelMaskPresent(false), 525 mChannelMask(0), 526 mDequeueCounter(0), 527 mInputMetadataType(kMetadataBufferTypeInvalid), 528 mOutputMetadataType(kMetadataBufferTypeInvalid), 529 mLegacyAdaptiveExperiment(false), 530 mMetadataBuffersToSubmit(0), 531 mNumUndequeuedBuffers(0), 532 mRepeatFrameDelayUs(-1ll), 533 mMaxPtsGapUs(-1ll), 534 mMaxFps(-1), 535 mTimePerFrameUs(-1ll), 536 mTimePerCaptureUs(-1ll), 537 mCreateInputBuffersSuspended(false), 538 mTunneled(false), 539 mDescribeColorAspectsIndex((OMX_INDEXTYPE)0), 540 mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) { 541 mUninitializedState = new UninitializedState(this); 542 mLoadedState = new LoadedState(this); 543 mLoadedToIdleState = new LoadedToIdleState(this); 544 mIdleToExecutingState = new IdleToExecutingState(this); 545 mExecutingState = new ExecutingState(this); 546 547 mOutputPortSettingsChangedState = 548 new OutputPortSettingsChangedState(this); 549 550 mExecutingToIdleState = new ExecutingToIdleState(this); 551 
mIdleToLoadedState = new IdleToLoadedState(this); 552 mFlushingState = new FlushingState(this); 553 554 mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false; 555 mInputEOSResult = OK; 556 557 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 558 559 changeState(mUninitializedState); 560} 561 562ACodec::~ACodec() { 563} 564 565void ACodec::setNotificationMessage(const sp<AMessage> &msg) { 566 mNotify = msg; 567} 568 569void ACodec::initiateSetup(const sp<AMessage> &msg) { 570 msg->setWhat(kWhatSetup); 571 msg->setTarget(this); 572 msg->post(); 573} 574 575void ACodec::signalSetParameters(const sp<AMessage> ¶ms) { 576 sp<AMessage> msg = new AMessage(kWhatSetParameters, this); 577 msg->setMessage("params", params); 578 msg->post(); 579} 580 581void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) { 582 msg->setWhat(kWhatAllocateComponent); 583 msg->setTarget(this); 584 msg->post(); 585} 586 587void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) { 588 msg->setWhat(kWhatConfigureComponent); 589 msg->setTarget(this); 590 msg->post(); 591} 592 593status_t ACodec::setSurface(const sp<Surface> &surface) { 594 sp<AMessage> msg = new AMessage(kWhatSetSurface, this); 595 msg->setObject("surface", surface); 596 597 sp<AMessage> response; 598 status_t err = msg->postAndAwaitResponse(&response); 599 600 if (err == OK) { 601 (void)response->findInt32("err", &err); 602 } 603 return err; 604} 605 606void ACodec::initiateCreateInputSurface() { 607 (new AMessage(kWhatCreateInputSurface, this))->post(); 608} 609 610void ACodec::initiateSetInputSurface( 611 const sp<PersistentSurface> &surface) { 612 sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this); 613 msg->setObject("input-surface", surface); 614 msg->post(); 615} 616 617void ACodec::signalEndOfInputStream() { 618 (new AMessage(kWhatSignalEndOfInputStream, this))->post(); 619} 620 621void ACodec::initiateStart() { 622 (new AMessage(kWhatStart, this))->post(); 623} 624 625void 
ACodec::signalFlush() { 626 ALOGV("[%s] signalFlush", mComponentName.c_str()); 627 (new AMessage(kWhatFlush, this))->post(); 628} 629 630void ACodec::signalResume() { 631 (new AMessage(kWhatResume, this))->post(); 632} 633 634void ACodec::initiateShutdown(bool keepComponentAllocated) { 635 sp<AMessage> msg = new AMessage(kWhatShutdown, this); 636 msg->setInt32("keepComponentAllocated", keepComponentAllocated); 637 msg->post(); 638 if (!keepComponentAllocated) { 639 // ensure shutdown completes in 3 seconds 640 (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000); 641 } 642} 643 644void ACodec::signalRequestIDRFrame() { 645 (new AMessage(kWhatRequestIDRFrame, this))->post(); 646} 647 648// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 649// Some codecs may return input buffers before having them processed. 650// This causes a halt if we already signaled an EOS on the input 651// port. For now keep submitting an output buffer if there was an 652// EOS on the input port, but not yet on the output port. 
653void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() { 654 if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] && 655 mMetadataBuffersToSubmit > 0) { 656 (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post(); 657 } 658} 659 660status_t ACodec::handleSetSurface(const sp<Surface> &surface) { 661 // allow keeping unset surface 662 if (surface == NULL) { 663 if (mNativeWindow != NULL) { 664 ALOGW("cannot unset a surface"); 665 return INVALID_OPERATION; 666 } 667 return OK; 668 } 669 670 // cannot switch from bytebuffers to surface 671 if (mNativeWindow == NULL) { 672 ALOGW("component was not configured with a surface"); 673 return INVALID_OPERATION; 674 } 675 676 ANativeWindow *nativeWindow = surface.get(); 677 // if we have not yet started the codec, we can simply set the native window 678 if (mBuffers[kPortIndexInput].size() == 0) { 679 mNativeWindow = surface; 680 return OK; 681 } 682 683 // we do not support changing a tunneled surface after start 684 if (mTunneled) { 685 ALOGW("cannot change tunneled surface"); 686 return INVALID_OPERATION; 687 } 688 689 int usageBits = 0; 690 // no need to reconnect as we will not dequeue all buffers 691 status_t err = setupNativeWindowSizeFormatAndUsage( 692 nativeWindow, &usageBits, 693 !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */); 694 if (err != OK) { 695 return err; 696 } 697 698 int ignoredFlags = kVideoGrallocUsage; 699 // New output surface is not allowed to add new usage flag except ignored ones. 700 if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) { 701 ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits); 702 return BAD_VALUE; 703 } 704 705 // get min undequeued count. We cannot switch to a surface that has a higher 706 // undequeued count than we allocated. 
707 int minUndequeuedBuffers = 0; 708 err = nativeWindow->query( 709 nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 710 &minUndequeuedBuffers); 711 if (err != 0) { 712 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 713 strerror(-err), -err); 714 return err; 715 } 716 if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) { 717 ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)", 718 minUndequeuedBuffers, mNumUndequeuedBuffers); 719 return BAD_VALUE; 720 } 721 722 // we cannot change the number of output buffers while OMX is running 723 // set up surface to the same count 724 Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput]; 725 ALOGV("setting up surface for %zu buffers", buffers.size()); 726 727 err = native_window_set_buffer_count(nativeWindow, buffers.size()); 728 if (err != 0) { 729 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 730 -err); 731 return err; 732 } 733 734 // need to enable allocation when attaching 735 surface->getIGraphicBufferProducer()->allowAllocation(true); 736 737 // for meta data mode, we move dequeud buffers to the new surface. 
738 // for non-meta mode, we must move all registered buffers 739 for (size_t i = 0; i < buffers.size(); ++i) { 740 const BufferInfo &info = buffers[i]; 741 // skip undequeued buffers for meta data mode 742 if (storingMetadataInDecodedBuffers() 743 && !mLegacyAdaptiveExperiment 744 && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 745 ALOGV("skipping buffer"); 746 continue; 747 } 748 ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer()); 749 750 err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer()); 751 if (err != OK) { 752 ALOGE("failed to attach buffer %p to the new surface: %s (%d)", 753 info.mGraphicBuffer->getNativeBuffer(), 754 strerror(-err), -err); 755 return err; 756 } 757 } 758 759 // cancel undequeued buffers to new surface 760 if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) { 761 for (size_t i = 0; i < buffers.size(); ++i) { 762 BufferInfo &info = buffers.editItemAt(i); 763 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 764 ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer()); 765 err = nativeWindow->cancelBuffer( 766 nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd); 767 info.mFenceFd = -1; 768 if (err != OK) { 769 ALOGE("failed to cancel buffer %p to the new surface: %s (%d)", 770 info.mGraphicBuffer->getNativeBuffer(), 771 strerror(-err), -err); 772 return err; 773 } 774 } 775 } 776 // disallow further allocation 777 (void)surface->getIGraphicBufferProducer()->allowAllocation(false); 778 } 779 780 // push blank buffers to previous window if requested 781 if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) { 782 pushBlankBuffersToNativeWindow(mNativeWindow.get()); 783 } 784 785 mNativeWindow = nativeWindow; 786 mNativeWindowUsageBits = usageBits; 787 return OK; 788} 789 790status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) { 791 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 792 793 CHECK(mDealer[portIndex] == 
NULL); 794 CHECK(mBuffers[portIndex].isEmpty()); 795 796 status_t err; 797 if (mNativeWindow != NULL && portIndex == kPortIndexOutput) { 798 if (storingMetadataInDecodedBuffers()) { 799 err = allocateOutputMetadataBuffers(); 800 } else { 801 err = allocateOutputBuffersFromNativeWindow(); 802 } 803 } else { 804 OMX_PARAM_PORTDEFINITIONTYPE def; 805 InitOMXParams(&def); 806 def.nPortIndex = portIndex; 807 808 err = mOMXNode->getParameter( 809 OMX_IndexParamPortDefinition, &def, sizeof(def)); 810 811 if (err == OK) { 812 MetadataBufferType type = 813 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 814 size_t bufSize = def.nBufferSize; 815 if (type == kMetadataBufferTypeANWBuffer) { 816 bufSize = sizeof(VideoNativeMetadata); 817 } else if (type == kMetadataBufferTypeNativeHandleSource) { 818 bufSize = sizeof(VideoNativeHandleMetadata); 819 } 820 821 // If using gralloc or native source input metadata buffers, allocate largest 822 // metadata size as we prefer to generate native source metadata, but component 823 // may require gralloc source. For camera source, allocate at least enough 824 // size for native metadata buffers. 
825 size_t allottedSize = bufSize; 826 if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) { 827 bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata)); 828 } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) { 829 bufSize = max(bufSize, sizeof(VideoNativeMetadata)); 830 } 831 832 size_t conversionBufferSize = 0; 833 834 sp<DataConverter> converter = mConverter[portIndex]; 835 if (converter != NULL) { 836 // here we assume sane conversions of max 4:1, so result fits in int32 837 if (portIndex == kPortIndexInput) { 838 conversionBufferSize = converter->sourceSize(bufSize); 839 } else { 840 conversionBufferSize = converter->targetSize(bufSize); 841 } 842 } 843 844 size_t alignment = MemoryDealer::getAllocationAlignment(); 845 846 ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port", 847 mComponentName.c_str(), 848 def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type), 849 portIndex == kPortIndexInput ? "input" : "output"); 850 851 // verify buffer sizes to avoid overflow in align() 852 if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) { 853 ALOGE("b/22885421"); 854 return NO_MEMORY; 855 } 856 857 // don't modify bufSize as OMX may not expect it to increase after negotiation 858 size_t alignedSize = align(bufSize, alignment); 859 size_t alignedConvSize = align(conversionBufferSize, alignment); 860 if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) { 861 ALOGE("b/22885421"); 862 return NO_MEMORY; 863 } 864 865 size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize); 866 mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec"); 867 868 const sp<AMessage> &format = portIndex == kPortIndexInput ? 
mInputFormat : mOutputFormat; 869 for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) { 870 sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize); 871 if (mem == NULL || mem->pointer() == NULL) { 872 return NO_MEMORY; 873 } 874 875 BufferInfo info; 876 info.mStatus = BufferInfo::OWNED_BY_US; 877 info.mFenceFd = -1; 878 info.mRenderInfo = NULL; 879 info.mNativeHandle = NULL; 880 881 if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) { 882 mem.clear(); 883 884 void *ptr = NULL; 885 sp<NativeHandle> native_handle; 886 err = mOMXNode->allocateSecureBuffer( 887 portIndex, bufSize, &info.mBufferID, 888 &ptr, &native_handle); 889 890 // TRICKY: this representation is unorthodox, but ACodec requires 891 // an ABuffer with a proper size to validate range offsets and lengths. 892 // Since mData is never referenced for secure input, it is used to store 893 // either the pointer to the secure buffer, or the opaque handle as on 894 // some devices ptr is actually an opaque handle, not a pointer. 895 896 // TRICKY2: use native handle as the base of the ABuffer if received one, 897 // because Widevine source only receives these base addresses. 898 const native_handle_t *native_handle_ptr = 899 native_handle == NULL ? NULL : native_handle->handle(); 900 info.mData = new MediaCodecBuffer(format, 901 new ABuffer(ptr != NULL ? 
ptr : (void *)native_handle_ptr, bufSize)); 902 info.mNativeHandle = native_handle; 903 info.mCodecData = info.mData; 904 } else { 905 err = mOMXNode->useBuffer( 906 portIndex, mem, &info.mBufferID, allottedSize); 907 } 908 909 if (mem != NULL) { 910 info.mCodecData = new SharedMemoryBuffer(format, mem); 911 info.mCodecRef = mem; 912 913 if (type == kMetadataBufferTypeANWBuffer) { 914 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 915 } 916 917 // if we require conversion, allocate conversion buffer for client use; 918 // otherwise, reuse codec buffer 919 if (mConverter[portIndex] != NULL) { 920 CHECK_GT(conversionBufferSize, (size_t)0); 921 mem = mDealer[portIndex]->allocate(conversionBufferSize); 922 if (mem == NULL|| mem->pointer() == NULL) { 923 return NO_MEMORY; 924 } 925 info.mData = new SharedMemoryBuffer(format, mem); 926 info.mMemRef = mem; 927 } else { 928 info.mData = info.mCodecData; 929 info.mMemRef = info.mCodecRef; 930 } 931 } 932 933 mBuffers[portIndex].push(info); 934 } 935 } 936 } 937 938 if (err != OK) { 939 return err; 940 } 941 942 sp<AMessage> notify = mNotify->dup(); 943 notify->setInt32("what", CodecBase::kWhatBuffersAllocated); 944 945 notify->setInt32("portIndex", portIndex); 946 947 sp<PortDescription> desc = new PortDescription; 948 949 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 950 const BufferInfo &info = mBuffers[portIndex][i]; 951 desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef); 952 } 953 954 notify->setObject("portDesc", desc); 955 notify->post(); 956 957 return OK; 958} 959 960status_t ACodec::setupNativeWindowSizeFormatAndUsage( 961 ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */, 962 bool reconnect) { 963 OMX_PARAM_PORTDEFINITIONTYPE def; 964 InitOMXParams(&def); 965 def.nPortIndex = kPortIndexOutput; 966 967 status_t err = mOMXNode->getParameter( 968 OMX_IndexParamPortDefinition, &def, sizeof(def)); 969 970 if (err != OK) { 971 return err; 972 } 973 
974 OMX_U32 usage = 0; 975 err = mOMXNode->getGraphicBufferUsage(kPortIndexOutput, &usage); 976 if (err != 0) { 977 ALOGW("querying usage flags from OMX IL component failed: %d", err); 978 // XXX: Currently this error is logged, but not fatal. 979 usage = 0; 980 } 981 int omxUsage = usage; 982 983 if (mFlags & kFlagIsGrallocUsageProtected) { 984 usage |= GRALLOC_USAGE_PROTECTED; 985 } 986 987 usage |= kVideoGrallocUsage; 988 *finalUsage = usage; 989 990 memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop)); 991 mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN; 992 993 ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage); 994 return setNativeWindowSizeFormatAndUsage( 995 nativeWindow, 996 def.format.video.nFrameWidth, 997 def.format.video.nFrameHeight, 998 def.format.video.eColorFormat, 999 mRotationDegrees, 1000 usage, 1001 reconnect); 1002} 1003 1004status_t ACodec::configureOutputBuffersFromNativeWindow( 1005 OMX_U32 *bufferCount, OMX_U32 *bufferSize, 1006 OMX_U32 *minUndequeuedBuffers, bool preregister) { 1007 1008 OMX_PARAM_PORTDEFINITIONTYPE def; 1009 InitOMXParams(&def); 1010 def.nPortIndex = kPortIndexOutput; 1011 1012 status_t err = mOMXNode->getParameter( 1013 OMX_IndexParamPortDefinition, &def, sizeof(def)); 1014 1015 if (err == OK) { 1016 err = setupNativeWindowSizeFormatAndUsage( 1017 mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */); 1018 } 1019 if (err != OK) { 1020 mNativeWindowUsageBits = 0; 1021 return err; 1022 } 1023 1024 // Exits here for tunneled video playback codecs -- i.e. skips native window 1025 // buffer allocation step as this is managed by the tunneled OMX omponent 1026 // itself and explicitly sets def.nBufferCountActual to 0. 
// Negotiates output buffer count and size between the OMX component and the
// native window.  On success, *bufferCount/*bufferSize describe what to
// allocate and *minUndequeuedBuffers is the number the consumer keeps.
// For tunneled playback all three are forced to 0 because the component
// manages its own buffers.  |preregister| is forwarded to
// setupNativeWindowSizeFormatAndUsage as its |reconnect| flag.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        // Leave no stale usage bits around if window setup failed.
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMXNode->setParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // The loop retries with progressively fewer extra buffers until the
    // component accepts the count; only the final (extraBuffers == 0)
    // failure is fatal.
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMXNode->setParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            // Successfully granted extras count toward the undequeued total
            // reported to the caller.
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}
// Allocates the full set of output buffers by dequeuing them from the native
// window and registering each with the OMX component via useGraphicBuffer().
// On success the minimum-undequeued quota is cancelled back to the window;
// on failure every buffer we still own is cancelled back.  The first error
// encountered is the one returned.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    // In non-metadata mode we temporarily permit the producer to allocate;
    // it is switched off again at the bottom of this function.
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = graphicBuffer;
        // Pushed before registration so the cancel loop below can return it
        // to the window even if useGraphicBuffer() fails.
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMXNode->useGraphicBuffer(
                kPortIndexOutput, graphicBuffer, &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        // NOTE(review): indexing by i assumes the list was empty on entry —
        // i.e. the just-pushed entry is at index i.
        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // Preserve the first error; cancel failures never mask it.
            if (err == 0) {
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}
// Allocates small shared-memory metadata buffers (one per output buffer) and
// registers them with the component.  The graphic buffers themselves are
// dequeued lazily later — except under the legacy-adaptive experiment, where
// they are preallocated and immediately cancelled back to the window.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers,
            mLegacyAdaptiveExperiment /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
         mComponentName.c_str(), bufferCount);

    // Metadata struct size depends on the negotiated metadata mode.
    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        // No graphic buffer is attached yet; conceptually the window still
        // owns the slot.
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // Mark "no fence" so the component doesn't wait on garbage.
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new SharedMemoryBuffer(mOutputFormat, mem);
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        // we use useBuffer for metadata regardless of quirks
        // NOTE(review): err from useBuffer is not checked here; a mid-loop
        // failure can be overwritten by a later success — confirm intended.
        err = mOMXNode->useBuffer(
                kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
             mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMXNode->updateGraphicBufferInMeta(
                    kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                if (err == OK) {
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // The undequeued quota stays with the window; only the rest get
    // submitted to the component (see submitOutputMetadataBuffer).
    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Dequeues one buffer from the native window and hands it to the component
// via fillBuffer, decrementing the remaining-submissions counter.  No-op
// once the counter reaches zero.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());
    if (mMetadataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetadataBuffersToSubmit;
    info->checkWriteFence("submitOutputMetadataBuffer");
    // Fence fd ownership passes to fillBuffer; clear ours so it isn't
    // double-closed.
    status_t err = mOMXNode->fillBuffer(info->mBufferID, info->mFenceFd);
    info->mFenceFd = -1;
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    return err;
}

// Blocks until |fd| signals (up to IOMX::kFenceTimeoutMs) and logs on
// timeout.  The Fence object takes ownership of fd.  |dbg| tags the log.
status_t ACodec::waitForFence(int fd, const char *dbg ) {
    status_t res = OK;
    if (fd >= 0) {
        sp<Fence> fence = new Fence(fd);
        res = fence->wait(IOMX::kFenceTimeoutMs);
        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    }
    return res;
}
1308// static 1309const char *ACodec::_asString(BufferInfo::Status s) { 1310 switch (s) { 1311 case BufferInfo::OWNED_BY_US: return "OUR"; 1312 case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT"; 1313 case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM"; 1314 case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM"; 1315 case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE"; 1316 case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED"; 1317 default: return "?"; 1318 } 1319} 1320 1321void ACodec::dumpBuffers(OMX_U32 portIndex) { 1322 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 1323 ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(), 1324 portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size()); 1325 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1326 const BufferInfo &info = mBuffers[portIndex][i]; 1327 ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u", 1328 i, info.mBufferID, info.mGraphicBuffer.get(), 1329 info.mGraphicBuffer == NULL ? 
NULL : info.mGraphicBuffer->getNativeBuffer(), 1330 _asString(info.mStatus), info.mStatus, info.mDequeuedAt); 1331 } 1332} 1333 1334status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { 1335 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 1336 1337 ALOGV("[%s] Calling cancelBuffer on buffer %u", 1338 mComponentName.c_str(), info->mBufferID); 1339 1340 info->checkWriteFence("cancelBufferToNativeWindow"); 1341 int err = mNativeWindow->cancelBuffer( 1342 mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 1343 info->mFenceFd = -1; 1344 1345 ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window", 1346 mComponentName.c_str(), info->mBufferID); 1347 // change ownership even if cancelBuffer fails 1348 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1349 1350 return err; 1351} 1352 1353void ACodec::updateRenderInfoForDequeuedBuffer( 1354 ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) { 1355 1356 info->mRenderInfo = 1357 mRenderTracker.updateInfoForDequeuedBuffer( 1358 buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]); 1359 1360 // check for any fences already signaled 1361 notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo); 1362} 1363 1364void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 1365 if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) { 1366 mRenderTracker.dumpRenderQueue(); 1367 } 1368} 1369 1370void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) { 1371 sp<AMessage> msg = mNotify->dup(); 1372 msg->setInt32("what", CodecBase::kWhatOutputFramesRendered); 1373 std::list<FrameRenderTracker::Info> done = 1374 mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete); 1375 1376 // unlink untracked frames 1377 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin(); 1378 it != done.cend(); ++it) { 1379 ssize_t index = it->getIndex(); 1380 if (index >= 0 && (size_t)index < 
// Collects frames whose fences have signaled (up to |until|, optionally
// dropping incomplete ones), unlinks their render info from our buffer
// table, and posts a kWhatOutputFramesRendered message if any rendered.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}

// Dequeues a buffer from the native window and maps it back to one of our
// BufferInfos.  Known-but-stale and unrecognized buffers are discarded and
// the dequeue retried; in metadata mode an unknown buffer instead replaces
// the oldest buffer still owned by the window (LRU replacement).  Returns
// NULL on fatal error, in tunneled mode, or after a dequeue failure.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
                " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
            return NULL;
        }

        bool stale = false;
        // Iterate backwards; matching is by gralloc handle, not pointer.
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                    info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used. We can simply ignore this as we have already dequeued
                // this buffer properly. NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    mOMXNode->updateGraphicBufferInMeta(
            kPortIndexOutput, oldest->mGraphicBuffer, oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->data());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mData->data());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->data());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->data());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer on the given port.  Iterates backwards because
// freeBuffer() removes the entry; the first error encountered is returned
// but all buffers are still attempted.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}
1536 if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT && 1537 info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { 1538 status_t err2 = freeBuffer(kPortIndexOutput, i); 1539 if (err == OK) { 1540 err = err2; 1541 } 1542 } 1543 } 1544 1545 return err; 1546} 1547 1548status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) { 1549 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1550 status_t err = OK; 1551 1552 // there should not be any fences in the metadata 1553 MetadataBufferType type = 1554 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 1555 if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL 1556 && info->mData->size() >= sizeof(VideoNativeMetadata)) { 1557 int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd; 1558 if (fenceFd >= 0) { 1559 ALOGW("unreleased fence (%d) in %s metadata buffer %zu", 1560 fenceFd, portIndex == kPortIndexInput ? "input" : "output", i); 1561 } 1562 } 1563 1564 switch (info->mStatus) { 1565 case BufferInfo::OWNED_BY_US: 1566 if (portIndex == kPortIndexOutput && mNativeWindow != NULL) { 1567 (void)cancelBufferToNativeWindow(info); 1568 } 1569 // fall through 1570 1571 case BufferInfo::OWNED_BY_NATIVE_WINDOW: 1572 err = mOMXNode->freeBuffer(portIndex, info->mBufferID); 1573 break; 1574 1575 default: 1576 ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus); 1577 err = FAILED_TRANSACTION; 1578 break; 1579 } 1580 1581 if (info->mFenceFd >= 0) { 1582 ::close(info->mFenceFd); 1583 } 1584 1585 if (portIndex == kPortIndexOutput) { 1586 mRenderTracker.untrackFrame(info->mRenderInfo, i); 1587 info->mRenderInfo = NULL; 1588 } 1589 1590 // remove buffer even if mOMXNode->freeBuffer fails 1591 mBuffers[portIndex].removeAt(i); 1592 return err; 1593} 1594 1595ACodec::BufferInfo *ACodec::findBufferByID( 1596 uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) { 1597 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1598 BufferInfo *info = 
&mBuffers[portIndex].editItemAt(i); 1599 1600 if (info->mBufferID == bufferID) { 1601 if (index != NULL) { 1602 *index = i; 1603 } 1604 return info; 1605 } 1606 } 1607 1608 ALOGE("Could not find buffer with ID %u", bufferID); 1609 return NULL; 1610} 1611 1612status_t ACodec::setComponentRole( 1613 bool isEncoder, const char *mime) { 1614 const char *role = GetComponentRole(isEncoder, mime); 1615 if (role == NULL) { 1616 return BAD_VALUE; 1617 } 1618 status_t err = SetComponentRole(mOMXNode, role); 1619 if (err != OK) { 1620 ALOGW("[%s] Failed to set standard component role '%s'.", 1621 mComponentName.c_str(), role); 1622 } 1623 return err; 1624} 1625 1626status_t ACodec::configureCodec( 1627 const char *mime, const sp<AMessage> &msg) { 1628 int32_t encoder; 1629 if (!msg->findInt32("encoder", &encoder)) { 1630 encoder = false; 1631 } 1632 1633 sp<AMessage> inputFormat = new AMessage; 1634 sp<AMessage> outputFormat = new AMessage; 1635 mConfigFormat = msg; 1636 1637 mIsEncoder = encoder; 1638 1639 mInputMetadataType = kMetadataBufferTypeInvalid; 1640 mOutputMetadataType = kMetadataBufferTypeInvalid; 1641 1642 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1643 1644 if (err != OK) { 1645 return err; 1646 } 1647 1648 int32_t bitRate = 0; 1649 // FLAC encoder doesn't need a bitrate, other encoders do 1650 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1651 && !msg->findInt32("bitrate", &bitRate)) { 1652 return INVALID_OPERATION; 1653 } 1654 1655 // propagate bitrate to the output so that the muxer has it 1656 if (encoder && msg->findInt32("bitrate", &bitRate)) { 1657 // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the 1658 // average bitrate. We've been setting both bitrate and max-bitrate to this same value. 
1659 outputFormat->setInt32("bitrate", bitRate); 1660 outputFormat->setInt32("max-bitrate", bitRate); 1661 } 1662 1663 int32_t storeMeta; 1664 if (encoder 1665 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1666 && storeMeta != kMetadataBufferTypeInvalid) { 1667 mInputMetadataType = (MetadataBufferType)storeMeta; 1668 err = mOMXNode->storeMetaDataInBuffers( 1669 kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1670 if (err != OK) { 1671 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1672 mComponentName.c_str(), err); 1673 1674 return err; 1675 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1676 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1677 // IOMX translates ANWBuffers to gralloc source already. 1678 mInputMetadataType = (MetadataBufferType)storeMeta; 1679 } 1680 1681 uint32_t usageBits; 1682 if (mOMXNode->getParameter( 1683 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1684 &usageBits, sizeof(usageBits)) == OK) { 1685 inputFormat->setInt32( 1686 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1687 } 1688 } 1689 1690 int32_t prependSPSPPS = 0; 1691 if (encoder 1692 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1693 && prependSPSPPS != 0) { 1694 OMX_INDEXTYPE index; 1695 err = mOMXNode->getExtensionIndex( 1696 "OMX.google.android.index.prependSPSPPSToIDRFrames", &index); 1697 1698 if (err == OK) { 1699 PrependSPSPPSToIDRFramesParams params; 1700 InitOMXParams(¶ms); 1701 params.bEnable = OMX_TRUE; 1702 1703 err = mOMXNode->setParameter(index, ¶ms, sizeof(params)); 1704 } 1705 1706 if (err != OK) { 1707 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1708 "IDR frames. (err %d)", err); 1709 1710 return err; 1711 } 1712 } 1713 1714 // Only enable metadata mode on encoder output if encoder can prepend 1715 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1716 // opaque handle, to which we don't have access. 
1717 int32_t video = !strncasecmp(mime, "video/", 6); 1718 mIsVideo = video; 1719 if (encoder && video) { 1720 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1721 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1722 && storeMeta != 0); 1723 1724 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1725 err = mOMXNode->storeMetaDataInBuffers(kPortIndexOutput, enable, &mOutputMetadataType); 1726 if (err != OK) { 1727 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1728 mComponentName.c_str(), err); 1729 } 1730 1731 if (!msg->findInt64( 1732 "repeat-previous-frame-after", 1733 &mRepeatFrameDelayUs)) { 1734 mRepeatFrameDelayUs = -1ll; 1735 } 1736 1737 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1738 mMaxPtsGapUs = -1ll; 1739 } 1740 1741 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1742 mMaxFps = -1; 1743 } 1744 1745 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1746 mTimePerCaptureUs = -1ll; 1747 } 1748 1749 if (!msg->findInt32( 1750 "create-input-buffers-suspended", 1751 (int32_t*)&mCreateInputBuffersSuspended)) { 1752 mCreateInputBuffersSuspended = false; 1753 } 1754 } 1755 1756 // NOTE: we only use native window for video decoders 1757 sp<RefBase> obj; 1758 bool haveNativeWindow = msg->findObject("native-window", &obj) 1759 && obj != NULL && video && !encoder; 1760 mUsingNativeWindow = haveNativeWindow; 1761 mLegacyAdaptiveExperiment = false; 1762 if (video && !encoder) { 1763 inputFormat->setInt32("adaptive-playback", false); 1764 1765 int32_t usageProtected; 1766 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1767 if (!haveNativeWindow) { 1768 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1769 return PERMISSION_DENIED; 1770 } 1771 mFlags |= kFlagIsGrallocUsageProtected; 1772 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1773 } 1774 1775 if (mFlags & kFlagIsSecure) { 1776 // use native_handles for secure input buffers 1777 
err = mOMXNode->enableNativeBuffers( 1778 kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1779 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1780 err = OK; // ignore error for now 1781 } 1782 } 1783 if (haveNativeWindow) { 1784 sp<ANativeWindow> nativeWindow = 1785 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1786 1787 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1788 int32_t autoFrc; 1789 if (msg->findInt32("auto-frc", &autoFrc)) { 1790 bool enabled = autoFrc; 1791 OMX_CONFIG_BOOLEANTYPE config; 1792 InitOMXParams(&config); 1793 config.bEnabled = (OMX_BOOL)enabled; 1794 status_t temp = mOMXNode->setConfig( 1795 (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1796 &config, sizeof(config)); 1797 if (temp == OK) { 1798 outputFormat->setInt32("auto-frc", enabled); 1799 } else if (enabled) { 1800 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1801 } 1802 } 1803 // END of temporary support for automatic FRC 1804 1805 int32_t tunneled; 1806 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1807 tunneled != 0) { 1808 ALOGI("Configuring TUNNELED video playback."); 1809 mTunneled = true; 1810 1811 int32_t audioHwSync = 0; 1812 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1813 ALOGW("No Audio HW Sync provided for video tunnel"); 1814 } 1815 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1816 if (err != OK) { 1817 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1818 audioHwSync, nativeWindow.get()); 1819 return err; 1820 } 1821 1822 int32_t maxWidth = 0, maxHeight = 0; 1823 if (msg->findInt32("max-width", &maxWidth) && 1824 msg->findInt32("max-height", &maxHeight)) { 1825 1826 err = mOMXNode->prepareForAdaptivePlayback( 1827 kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1828 if (err != OK) { 1829 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1830 mComponentName.c_str(), err); 1831 // allow failure 1832 err = OK; 1833 } else { 
1834 inputFormat->setInt32("max-width", maxWidth); 1835 inputFormat->setInt32("max-height", maxHeight); 1836 inputFormat->setInt32("adaptive-playback", true); 1837 } 1838 } 1839 } else { 1840 ALOGV("Configuring CPU controlled video playback."); 1841 mTunneled = false; 1842 1843 // Explicity reset the sideband handle of the window for 1844 // non-tunneled video in case the window was previously used 1845 // for a tunneled video playback. 1846 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1847 if (err != OK) { 1848 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1849 return err; 1850 } 1851 1852 // Always try to enable dynamic output buffers on native surface 1853 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1854 err = mOMXNode->storeMetaDataInBuffers( 1855 kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1856 if (err != OK) { 1857 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1858 mComponentName.c_str(), err); 1859 1860 // if adaptive playback has been requested, try JB fallback 1861 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1862 // LARGE MEMORY REQUIREMENT 1863 1864 // we will not do adaptive playback on software accessed 1865 // surfaces as they never had to respond to changes in the 1866 // crop window, and we don't trust that they will be able to. 
1867 int usageBits = 0; 1868 bool canDoAdaptivePlayback; 1869 1870 if (nativeWindow->query( 1871 nativeWindow.get(), 1872 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1873 &usageBits) != OK) { 1874 canDoAdaptivePlayback = false; 1875 } else { 1876 canDoAdaptivePlayback = 1877 (usageBits & 1878 (GRALLOC_USAGE_SW_READ_MASK | 1879 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1880 } 1881 1882 int32_t maxWidth = 0, maxHeight = 0; 1883 if (canDoAdaptivePlayback && 1884 msg->findInt32("max-width", &maxWidth) && 1885 msg->findInt32("max-height", &maxHeight)) { 1886 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1887 mComponentName.c_str(), maxWidth, maxHeight); 1888 1889 err = mOMXNode->prepareForAdaptivePlayback( 1890 kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1891 ALOGW_IF(err != OK, 1892 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1893 mComponentName.c_str(), err); 1894 1895 if (err == OK) { 1896 inputFormat->setInt32("max-width", maxWidth); 1897 inputFormat->setInt32("max-height", maxHeight); 1898 inputFormat->setInt32("adaptive-playback", true); 1899 } 1900 } 1901 // allow failure 1902 err = OK; 1903 } else { 1904 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1905 mComponentName.c_str()); 1906 CHECK(storingMetadataInDecodedBuffers()); 1907 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1908 "legacy-adaptive", !msg->contains("no-experiments")); 1909 1910 inputFormat->setInt32("adaptive-playback", true); 1911 } 1912 1913 int32_t push; 1914 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1915 && push != 0) { 1916 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1917 } 1918 } 1919 1920 int32_t rotationDegrees; 1921 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1922 mRotationDegrees = rotationDegrees; 1923 } else { 1924 mRotationDegrees = 0; 1925 } 1926 } 1927 1928 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 1929 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 1930 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 1931 1932 if (video) { 1933 // determine need for software renderer 1934 bool usingSwRenderer = false; 1935 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1936 usingSwRenderer = true; 1937 haveNativeWindow = false; 1938 } 1939 1940 if (encoder) { 1941 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 1942 } else { 1943 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 1944 } 1945 1946 if (err != OK) { 1947 return err; 1948 } 1949 1950 if (haveNativeWindow) { 1951 mNativeWindow = static_cast<Surface *>(obj.get()); 1952 } 1953 1954 // initialize native window now to get actual output format 1955 // TODO: this is needed for some encoders even though they don't use native window 1956 err = initNativeWindow(); 1957 if (err != OK) { 1958 return err; 1959 } 1960 1961 // fallback for devices that do not handle flex-YUV for native buffers 1962 if (haveNativeWindow) { 1963 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 1964 if (msg->findInt32("color-format", &requestedColorFormat) && 1965 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 1966 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 1967 if (err != OK) { 1968 return err; 1969 } 1970 int32_t colorFormat = OMX_COLOR_FormatUnused; 1971 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 1972 if (!outputFormat->findInt32("color-format", &colorFormat)) { 1973 ALOGE("ouptut port did not have a color format (wrong domain?)"); 1974 return BAD_VALUE; 1975 } 1976 ALOGD("[%s] Requested output format %#x and got %#x.", 1977 mComponentName.c_str(), requestedColorFormat, colorFormat); 1978 if (!IsFlexibleColorFormat( 1979 mOMXNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 1980 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 1981 // device did not handle flex-YUV request for native window, fall back 1982 // to SW renderer 1983 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 1984 mNativeWindow.clear(); 1985 mNativeWindowUsageBits = 0; 1986 haveNativeWindow = false; 1987 usingSwRenderer = true; 1988 if (storingMetadataInDecodedBuffers()) { 1989 err = mOMXNode->storeMetaDataInBuffers( 1990 kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 1991 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 1992 // TODO: implement adaptive-playback support for bytebuffer mode. 1993 // This is done by SW codecs, but most HW codecs don't support it. 1994 inputFormat->setInt32("adaptive-playback", false); 1995 } 1996 if (err == OK) { 1997 err = mOMXNode->enableNativeBuffers( 1998 kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 1999 } 2000 if (mFlags & kFlagIsGrallocUsageProtected) { 2001 // fallback is not supported for protected playback 2002 err = PERMISSION_DENIED; 2003 } else if (err == OK) { 2004 err = setupVideoDecoder( 2005 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2006 } 2007 } 2008 } 2009 } 2010 2011 if (usingSwRenderer) { 2012 outputFormat->setInt32("using-sw-renderer", 1); 2013 } 2014 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2015 int32_t numChannels, sampleRate; 2016 if (!msg->findInt32("channel-count", &numChannels) 2017 || !msg->findInt32("sample-rate", &sampleRate)) { 2018 // Since we did not always check for these, leave them optional 2019 // and have the decoder figure it all out. 2020 err = OK; 2021 } else { 2022 err = setupRawAudioFormat( 2023 encoder ? 
kPortIndexInput : kPortIndexOutput, 2024 sampleRate, 2025 numChannels); 2026 } 2027 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2028 int32_t numChannels, sampleRate; 2029 if (!msg->findInt32("channel-count", &numChannels) 2030 || !msg->findInt32("sample-rate", &sampleRate)) { 2031 err = INVALID_OPERATION; 2032 } else { 2033 int32_t isADTS, aacProfile; 2034 int32_t sbrMode; 2035 int32_t maxOutputChannelCount; 2036 int32_t pcmLimiterEnable; 2037 drcParams_t drc; 2038 if (!msg->findInt32("is-adts", &isADTS)) { 2039 isADTS = 0; 2040 } 2041 if (!msg->findInt32("aac-profile", &aacProfile)) { 2042 aacProfile = OMX_AUDIO_AACObjectNull; 2043 } 2044 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2045 sbrMode = -1; 2046 } 2047 2048 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2049 maxOutputChannelCount = -1; 2050 } 2051 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2052 // value is unknown 2053 pcmLimiterEnable = -1; 2054 } 2055 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2056 // value is unknown 2057 drc.encodedTargetLevel = -1; 2058 } 2059 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2060 // value is unknown 2061 drc.drcCut = -1; 2062 } 2063 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2064 // value is unknown 2065 drc.drcBoost = -1; 2066 } 2067 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2068 // value is unknown 2069 drc.heavyCompression = -1; 2070 } 2071 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2072 // value is unknown 2073 drc.targetRefLevel = -1; 2074 } 2075 2076 err = setupAACCodec( 2077 encoder, numChannels, sampleRate, bitRate, aacProfile, 2078 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2079 pcmLimiterEnable); 2080 } 2081 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2082 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2083 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2084 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2085 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2086 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2087 // These are PCM-like formats with a fixed sample rate but 2088 // a variable number of channels. 2089 2090 int32_t numChannels; 2091 if (!msg->findInt32("channel-count", &numChannels)) { 2092 err = INVALID_OPERATION; 2093 } else { 2094 int32_t sampleRate; 2095 if (!msg->findInt32("sample-rate", &sampleRate)) { 2096 sampleRate = 8000; 2097 } 2098 err = setupG711Codec(encoder, sampleRate, numChannels); 2099 } 2100 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2101 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2102 if (encoder && 2103 (!msg->findInt32("channel-count", &numChannels) 2104 || !msg->findInt32("sample-rate", &sampleRate))) { 2105 ALOGE("missing channel count or sample rate for FLAC encoder"); 2106 err = INVALID_OPERATION; 2107 } else { 2108 if (encoder) { 2109 if (!msg->findInt32( 2110 "complexity", &compressionLevel) && 2111 !msg->findInt32( 2112 "flac-compression-level", &compressionLevel)) { 2113 compressionLevel = 5; // default FLAC compression level 2114 } else if (compressionLevel < 0) { 2115 ALOGW("compression level %d outside [0..8] range, " 2116 "using 0", 2117 compressionLevel); 2118 compressionLevel = 0; 2119 } else if (compressionLevel > 8) { 2120 ALOGW("compression level %d outside [0..8] range, " 2121 "using 8", 2122 compressionLevel); 2123 compressionLevel = 8; 2124 } 2125 } 2126 err = setupFlacCodec( 2127 encoder, numChannels, sampleRate, compressionLevel); 2128 } 2129 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2130 int32_t numChannels, sampleRate; 2131 if (encoder 2132 || !msg->findInt32("channel-count", &numChannels) 2133 || !msg->findInt32("sample-rate", &sampleRate)) { 2134 err = INVALID_OPERATION; 2135 } else { 2136 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2137 } 2138 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2139 int32_t numChannels; 2140 int32_t sampleRate; 2141 if (!msg->findInt32("channel-count", &numChannels) 2142 || !msg->findInt32("sample-rate", &sampleRate)) { 2143 err = INVALID_OPERATION; 2144 } else { 2145 err = setupAC3Codec(encoder, numChannels, sampleRate); 2146 } 2147 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2148 int32_t numChannels; 2149 int32_t sampleRate; 2150 if (!msg->findInt32("channel-count", &numChannels) 2151 || !msg->findInt32("sample-rate", &sampleRate)) { 2152 err = INVALID_OPERATION; 2153 } else { 2154 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2155 } 2156 } 2157 2158 if (err != OK) { 2159 return err; 2160 } 2161 2162 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2163 mEncoderDelay = 0; 2164 } 2165 2166 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2167 mEncoderPadding = 0; 2168 } 2169 2170 if (msg->findInt32("channel-mask", &mChannelMask)) { 2171 mChannelMaskPresent = true; 2172 } else { 2173 mChannelMaskPresent = false; 2174 } 2175 2176 int32_t maxInputSize; 2177 if (msg->findInt32("max-input-size", &maxInputSize)) { 2178 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2179 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2180 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2181 } 2182 2183 int32_t priority; 2184 if (msg->findInt32("priority", &priority)) { 2185 err = setPriority(priority); 2186 } 2187 2188 int32_t rateInt = -1; 2189 float rateFloat = -1; 2190 if (!msg->findFloat("operating-rate", &rateFloat)) { 2191 msg->findInt32("operating-rate", &rateInt); 2192 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
2193 } 2194 if (rateFloat > 0) { 2195 err = setOperatingRate(rateFloat, video); 2196 } 2197 2198 // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame. 2199 mBaseOutputFormat = outputFormat; 2200 // trigger a kWhatOutputFormatChanged msg on first buffer 2201 mLastOutputFormat.clear(); 2202 2203 err = getPortFormat(kPortIndexInput, inputFormat); 2204 if (err == OK) { 2205 err = getPortFormat(kPortIndexOutput, outputFormat); 2206 if (err == OK) { 2207 mInputFormat = inputFormat; 2208 mOutputFormat = outputFormat; 2209 } 2210 } 2211 2212 // create data converters if needed 2213 if (!video && err == OK) { 2214 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 2215 if (encoder) { 2216 (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2217 mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding); 2218 if (mConverter[kPortIndexInput] != NULL) { 2219 mInputFormat->setInt32("pcm-encoding", pcmEncoding); 2220 } 2221 } else { 2222 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2223 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 2224 if (mConverter[kPortIndexOutput] != NULL) { 2225 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 2226 } 2227 } 2228 } 2229 2230 return err; 2231} 2232 2233status_t ACodec::setPriority(int32_t priority) { 2234 if (priority < 0) { 2235 return BAD_VALUE; 2236 } 2237 OMX_PARAM_U32TYPE config; 2238 InitOMXParams(&config); 2239 config.nU32 = (OMX_U32)priority; 2240 status_t temp = mOMXNode->setConfig( 2241 (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2242 &config, sizeof(config)); 2243 if (temp != OK) { 2244 ALOGI("codec does not support config priority (err %d)", temp); 2245 } 2246 return OK; 2247} 2248 2249status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2250 if (rateFloat < 0) { 2251 return BAD_VALUE; 2252 } 2253 OMX_U32 rate; 2254 if (isVideo) { 2255 if (rateFloat > 
65535) { 2256 return BAD_VALUE; 2257 } 2258 rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f); 2259 } else { 2260 if (rateFloat > UINT_MAX) { 2261 return BAD_VALUE; 2262 } 2263 rate = (OMX_U32)(rateFloat); 2264 } 2265 OMX_PARAM_U32TYPE config; 2266 InitOMXParams(&config); 2267 config.nU32 = rate; 2268 status_t err = mOMXNode->setConfig( 2269 (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, 2270 &config, sizeof(config)); 2271 if (err != OK) { 2272 ALOGI("codec does not support config operating rate (err %d)", err); 2273 } 2274 return OK; 2275} 2276 2277status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) { 2278 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2279 InitOMXParams(¶ms); 2280 params.nPortIndex = kPortIndexOutput; 2281 status_t err = mOMXNode->getConfig( 2282 (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2283 if (err == OK) { 2284 *intraRefreshPeriod = params.nRefreshPeriod; 2285 return OK; 2286 } 2287 2288 // Fallback to query through standard OMX index. 
2289 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2290 InitOMXParams(&refreshParams); 2291 refreshParams.nPortIndex = kPortIndexOutput; 2292 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2293 err = mOMXNode->getParameter( 2294 OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams)); 2295 if (err != OK || refreshParams.nCirMBs == 0) { 2296 *intraRefreshPeriod = 0; 2297 return OK; 2298 } 2299 2300 // Calculate period based on width and height 2301 uint32_t width, height; 2302 OMX_PARAM_PORTDEFINITIONTYPE def; 2303 InitOMXParams(&def); 2304 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2305 def.nPortIndex = kPortIndexOutput; 2306 err = mOMXNode->getParameter( 2307 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2308 if (err != OK) { 2309 *intraRefreshPeriod = 0; 2310 return err; 2311 } 2312 width = video_def->nFrameWidth; 2313 height = video_def->nFrameHeight; 2314 // Use H.264/AVC MacroBlock size 16x16 2315 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs); 2316 2317 return OK; 2318} 2319 2320status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) { 2321 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2322 InitOMXParams(¶ms); 2323 params.nPortIndex = kPortIndexOutput; 2324 params.nRefreshPeriod = intraRefreshPeriod; 2325 status_t err = mOMXNode->setConfig( 2326 (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2327 if (err == OK) { 2328 return OK; 2329 } 2330 2331 // Only in configure state, a component could invoke setParameter. 
2332 if (!inConfigure) { 2333 return INVALID_OPERATION; 2334 } else { 2335 ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str()); 2336 } 2337 2338 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2339 InitOMXParams(&refreshParams); 2340 refreshParams.nPortIndex = kPortIndexOutput; 2341 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2342 2343 if (intraRefreshPeriod == 0) { 2344 // 0 means disable intra refresh. 2345 refreshParams.nCirMBs = 0; 2346 } else { 2347 // Calculate macroblocks that need to be intra coded base on width and height 2348 uint32_t width, height; 2349 OMX_PARAM_PORTDEFINITIONTYPE def; 2350 InitOMXParams(&def); 2351 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2352 def.nPortIndex = kPortIndexOutput; 2353 err = mOMXNode->getParameter( 2354 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2355 if (err != OK) { 2356 return err; 2357 } 2358 width = video_def->nFrameWidth; 2359 height = video_def->nFrameHeight; 2360 // Use H.264/AVC MacroBlock size 16x16 2361 refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod); 2362 } 2363 2364 err = mOMXNode->setParameter( 2365 OMX_IndexParamVideoIntraRefresh, 2366 &refreshParams, sizeof(refreshParams)); 2367 if (err != OK) { 2368 return err; 2369 } 2370 2371 return OK; 2372} 2373 2374status_t ACodec::configureTemporalLayers( 2375 const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) { 2376 if (!mIsVideo || !mIsEncoder) { 2377 return INVALID_OPERATION; 2378 } 2379 2380 AString tsSchema; 2381 if (!msg->findString("ts-schema", &tsSchema)) { 2382 return OK; 2383 } 2384 2385 unsigned int numLayers = 0; 2386 unsigned int numBLayers = 0; 2387 int tags; 2388 char dummy; 2389 OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern = 2390 OMX_VIDEO_AndroidTemporalLayeringPatternNone; 2391 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1 2392 && numLayers > 0) { 2393 pattern = 
OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC; 2394 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c", 2395 &numLayers, &dummy, &numBLayers, &dummy)) 2396 && (tags == 1 || (tags == 3 && dummy == '+')) 2397 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 2398 numLayers += numBLayers; 2399 pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid; 2400 } else { 2401 ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 2402 return BAD_VALUE; 2403 } 2404 2405 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams; 2406 InitOMXParams(&layerParams); 2407 layerParams.nPortIndex = kPortIndexOutput; 2408 2409 status_t err = mOMXNode->getParameter( 2410 (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2411 &layerParams, sizeof(layerParams)); 2412 2413 if (err != OK) { 2414 return err; 2415 } else if (!(layerParams.eSupportedPatterns & pattern)) { 2416 return BAD_VALUE; 2417 } 2418 2419 numLayers = min(numLayers, layerParams.nLayerCountMax); 2420 numBLayers = min(numBLayers, layerParams.nBLayerCountMax); 2421 2422 if (!inConfigure) { 2423 OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig; 2424 InitOMXParams(&layerConfig); 2425 layerConfig.nPortIndex = kPortIndexOutput; 2426 layerConfig.ePattern = pattern; 2427 layerConfig.nPLayerCountActual = numLayers - numBLayers; 2428 layerConfig.nBLayerCountActual = numBLayers; 2429 layerConfig.bBitrateRatiosSpecified = OMX_FALSE; 2430 2431 err = mOMXNode->setConfig( 2432 (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering, 2433 &layerConfig, sizeof(layerConfig)); 2434 } else { 2435 layerParams.ePattern = pattern; 2436 layerParams.nPLayerCountActual = numLayers - numBLayers; 2437 layerParams.nBLayerCountActual = numBLayers; 2438 layerParams.bBitrateRatiosSpecified = OMX_FALSE; 2439 2440 err = mOMXNode->setParameter( 2441 (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2442 &layerParams, sizeof(layerParams)); 2443 } 2444 2445 AString configSchema; 2446 if 
(pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) { 2447 configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers); 2448 } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) { 2449 configSchema = AStringPrintf("webrtc.vp8.%u", numLayers); 2450 } 2451 2452 if (err != OK) { 2453 ALOGW("Failed to set temporal layers to %s (requested %s)", 2454 configSchema.c_str(), tsSchema.c_str()); 2455 return err; 2456 } 2457 2458 err = mOMXNode->getParameter( 2459 (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 2460 &layerParams, sizeof(layerParams)); 2461 2462 if (err == OK) { 2463 ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)", 2464 tsSchema.c_str(), configSchema.c_str(), 2465 asString(layerParams.ePattern), layerParams.ePattern, 2466 layerParams.nPLayerCountActual, layerParams.nBLayerCountActual); 2467 2468 if (outputFormat.get() == mOutputFormat.get()) { 2469 mOutputFormat = mOutputFormat->dup(); // trigger an output format change event 2470 } 2471 // assume we got what we configured 2472 outputFormat->setString("ts-schema", configSchema); 2473 } 2474 return err; 2475} 2476 2477status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) { 2478 OMX_PARAM_PORTDEFINITIONTYPE def; 2479 InitOMXParams(&def); 2480 def.nPortIndex = portIndex; 2481 2482 status_t err = mOMXNode->getParameter( 2483 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2484 2485 if (err != OK) { 2486 return err; 2487 } 2488 2489 if (def.nBufferSize >= size) { 2490 return OK; 2491 } 2492 2493 def.nBufferSize = size; 2494 2495 err = mOMXNode->setParameter( 2496 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2497 2498 if (err != OK) { 2499 return err; 2500 } 2501 2502 err = mOMXNode->getParameter( 2503 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2504 2505 if (err != OK) { 2506 return err; 2507 } 2508 2509 if (def.nBufferSize < size) { 2510 ALOGE("failed to set min buffer size to %zu (is still %u)", 
size, def.nBufferSize); 2511 return FAILED_TRANSACTION; 2512 } 2513 2514 return OK; 2515} 2516 2517status_t ACodec::selectAudioPortFormat( 2518 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 2519 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 2520 InitOMXParams(&format); 2521 2522 format.nPortIndex = portIndex; 2523 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2524 format.nIndex = index; 2525 status_t err = mOMXNode->getParameter( 2526 OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2527 2528 if (err != OK) { 2529 return err; 2530 } 2531 2532 if (format.eEncoding == desiredFormat) { 2533 break; 2534 } 2535 2536 if (index == kMaxIndicesToCheck) { 2537 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 2538 mComponentName.c_str(), index, 2539 asString(format.eEncoding), format.eEncoding); 2540 return ERROR_UNSUPPORTED; 2541 } 2542 } 2543 2544 return mOMXNode->setParameter( 2545 OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2546} 2547 2548status_t ACodec::setupAACCodec( 2549 bool encoder, int32_t numChannels, int32_t sampleRate, 2550 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 2551 int32_t maxOutputChannelCount, const drcParams_t& drc, 2552 int32_t pcmLimiterEnable) { 2553 if (encoder && isADTS) { 2554 return -EINVAL; 2555 } 2556 2557 status_t err = setupRawAudioFormat( 2558 encoder ? 
kPortIndexInput : kPortIndexOutput, 2559 sampleRate, 2560 numChannels); 2561 2562 if (err != OK) { 2563 return err; 2564 } 2565 2566 if (encoder) { 2567 err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC); 2568 2569 if (err != OK) { 2570 return err; 2571 } 2572 2573 OMX_PARAM_PORTDEFINITIONTYPE def; 2574 InitOMXParams(&def); 2575 def.nPortIndex = kPortIndexOutput; 2576 2577 err = mOMXNode->getParameter( 2578 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2579 2580 if (err != OK) { 2581 return err; 2582 } 2583 2584 def.format.audio.bFlagErrorConcealment = OMX_TRUE; 2585 def.format.audio.eEncoding = OMX_AUDIO_CodingAAC; 2586 2587 err = mOMXNode->setParameter( 2588 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2589 2590 if (err != OK) { 2591 return err; 2592 } 2593 2594 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2595 InitOMXParams(&profile); 2596 profile.nPortIndex = kPortIndexOutput; 2597 2598 err = mOMXNode->getParameter( 2599 OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2600 2601 if (err != OK) { 2602 return err; 2603 } 2604 2605 profile.nChannels = numChannels; 2606 2607 profile.eChannelMode = 2608 (numChannels == 1) 2609 ? 
OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo; 2610 2611 profile.nSampleRate = sampleRate; 2612 profile.nBitRate = bitRate; 2613 profile.nAudioBandWidth = 0; 2614 profile.nFrameLength = 0; 2615 profile.nAACtools = OMX_AUDIO_AACToolAll; 2616 profile.nAACERtools = OMX_AUDIO_AACERNone; 2617 profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile; 2618 profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF; 2619 switch (sbrMode) { 2620 case 0: 2621 // disable sbr 2622 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2623 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2624 break; 2625 case 1: 2626 // enable single-rate sbr 2627 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2628 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR; 2629 break; 2630 case 2: 2631 // enable dual-rate sbr 2632 profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR; 2633 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2634 break; 2635 case -1: 2636 // enable both modes -> the codec will decide which mode should be used 2637 profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR; 2638 profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR; 2639 break; 2640 default: 2641 // unsupported sbr mode 2642 return BAD_VALUE; 2643 } 2644 2645 2646 err = mOMXNode->setParameter( 2647 OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2648 2649 if (err != OK) { 2650 return err; 2651 } 2652 2653 return err; 2654 } 2655 2656 OMX_AUDIO_PARAM_AACPROFILETYPE profile; 2657 InitOMXParams(&profile); 2658 profile.nPortIndex = kPortIndexInput; 2659 2660 err = mOMXNode->getParameter( 2661 OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2662 2663 if (err != OK) { 2664 return err; 2665 } 2666 2667 profile.nChannels = numChannels; 2668 profile.nSampleRate = sampleRate; 2669 2670 profile.eAACStreamFormat = 2671 isADTS 2672 ? 
OMX_AUDIO_AACStreamFormatMP4ADTS 2673 : OMX_AUDIO_AACStreamFormatMP4FF; 2674 2675 OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation; 2676 InitOMXParams(&presentation); 2677 presentation.nMaxOutputChannels = maxOutputChannelCount; 2678 presentation.nDrcCut = drc.drcCut; 2679 presentation.nDrcBoost = drc.drcBoost; 2680 presentation.nHeavyCompression = drc.heavyCompression; 2681 presentation.nTargetReferenceLevel = drc.targetRefLevel; 2682 presentation.nEncodedTargetLevel = drc.encodedTargetLevel; 2683 presentation.nPCMLimiterEnable = pcmLimiterEnable; 2684 2685 status_t res = mOMXNode->setParameter( 2686 OMX_IndexParamAudioAac, &profile, sizeof(profile)); 2687 if (res == OK) { 2688 // optional parameters, will not cause configuration failure 2689 mOMXNode->setParameter( 2690 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation, 2691 &presentation, sizeof(presentation)); 2692 } else { 2693 ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res); 2694 } 2695 mSampleRate = sampleRate; 2696 return res; 2697} 2698 2699status_t ACodec::setupAC3Codec( 2700 bool encoder, int32_t numChannels, int32_t sampleRate) { 2701 status_t err = setupRawAudioFormat( 2702 encoder ? 
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2703 2704 if (err != OK) { 2705 return err; 2706 } 2707 2708 if (encoder) { 2709 ALOGW("AC3 encoding is not supported."); 2710 return INVALID_OPERATION; 2711 } 2712 2713 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2714 InitOMXParams(&def); 2715 def.nPortIndex = kPortIndexInput; 2716 2717 err = mOMXNode->getParameter( 2718 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, &def, sizeof(def)); 2719 2720 if (err != OK) { 2721 return err; 2722 } 2723 2724 def.nChannels = numChannels; 2725 def.nSampleRate = sampleRate; 2726 2727 return mOMXNode->setParameter( 2728 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, &def, sizeof(def)); 2729} 2730 2731status_t ACodec::setupEAC3Codec( 2732 bool encoder, int32_t numChannels, int32_t sampleRate) { 2733 status_t err = setupRawAudioFormat( 2734 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2735 2736 if (err != OK) { 2737 return err; 2738 } 2739 2740 if (encoder) { 2741 ALOGW("EAC3 encoding is not supported."); 2742 return INVALID_OPERATION; 2743 } 2744 2745 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2746 InitOMXParams(&def); 2747 def.nPortIndex = kPortIndexInput; 2748 2749 err = mOMXNode->getParameter( 2750 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, &def, sizeof(def)); 2751 2752 if (err != OK) { 2753 return err; 2754 } 2755 2756 def.nChannels = numChannels; 2757 def.nSampleRate = sampleRate; 2758 2759 return mOMXNode->setParameter( 2760 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, &def, sizeof(def)); 2761} 2762 2763static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2764 bool isAMRWB, int32_t bps) { 2765 if (isAMRWB) { 2766 if (bps <= 6600) { 2767 return OMX_AUDIO_AMRBandModeWB0; 2768 } else if (bps <= 8850) { 2769 return OMX_AUDIO_AMRBandModeWB1; 2770 } else if (bps <= 12650) { 2771 return OMX_AUDIO_AMRBandModeWB2; 2772 } else if (bps <= 14250) { 2773 return OMX_AUDIO_AMRBandModeWB3; 2774 } else if (bps <= 15850) { 2775 return OMX_AUDIO_AMRBandModeWB4; 
2776 } else if (bps <= 18250) { 2777 return OMX_AUDIO_AMRBandModeWB5; 2778 } else if (bps <= 19850) { 2779 return OMX_AUDIO_AMRBandModeWB6; 2780 } else if (bps <= 23050) { 2781 return OMX_AUDIO_AMRBandModeWB7; 2782 } 2783 2784 // 23850 bps 2785 return OMX_AUDIO_AMRBandModeWB8; 2786 } else { // AMRNB 2787 if (bps <= 4750) { 2788 return OMX_AUDIO_AMRBandModeNB0; 2789 } else if (bps <= 5150) { 2790 return OMX_AUDIO_AMRBandModeNB1; 2791 } else if (bps <= 5900) { 2792 return OMX_AUDIO_AMRBandModeNB2; 2793 } else if (bps <= 6700) { 2794 return OMX_AUDIO_AMRBandModeNB3; 2795 } else if (bps <= 7400) { 2796 return OMX_AUDIO_AMRBandModeNB4; 2797 } else if (bps <= 7950) { 2798 return OMX_AUDIO_AMRBandModeNB5; 2799 } else if (bps <= 10200) { 2800 return OMX_AUDIO_AMRBandModeNB6; 2801 } 2802 2803 // 12200 bps 2804 return OMX_AUDIO_AMRBandModeNB7; 2805 } 2806} 2807 2808status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2809 OMX_AUDIO_PARAM_AMRTYPE def; 2810 InitOMXParams(&def); 2811 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2812 2813 status_t err = mOMXNode->getParameter( 2814 OMX_IndexParamAudioAmr, &def, sizeof(def)); 2815 2816 if (err != OK) { 2817 return err; 2818 } 2819 2820 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2821 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2822 2823 err = mOMXNode->setParameter( 2824 OMX_IndexParamAudioAmr, &def, sizeof(def)); 2825 2826 if (err != OK) { 2827 return err; 2828 } 2829 2830 return setupRawAudioFormat( 2831 encoder ? kPortIndexInput : kPortIndexOutput, 2832 isWAMR ? 
16000 : 8000 /* sampleRate */, 2833 1 /* numChannels */); 2834} 2835 2836status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2837 if (encoder) { 2838 return INVALID_OPERATION; 2839 } 2840 2841 return setupRawAudioFormat( 2842 kPortIndexInput, sampleRate, numChannels); 2843} 2844 2845status_t ACodec::setupFlacCodec( 2846 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2847 2848 if (encoder) { 2849 OMX_AUDIO_PARAM_FLACTYPE def; 2850 InitOMXParams(&def); 2851 def.nPortIndex = kPortIndexOutput; 2852 2853 // configure compression level 2854 status_t err = mOMXNode->getParameter(OMX_IndexParamAudioFlac, &def, sizeof(def)); 2855 if (err != OK) { 2856 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2857 return err; 2858 } 2859 def.nCompressionLevel = compressionLevel; 2860 err = mOMXNode->setParameter(OMX_IndexParamAudioFlac, &def, sizeof(def)); 2861 if (err != OK) { 2862 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2863 return err; 2864 } 2865 } 2866 2867 return setupRawAudioFormat( 2868 encoder ? 
kPortIndexInput : kPortIndexOutput, 2869 sampleRate, 2870 numChannels); 2871} 2872 2873status_t ACodec::setupRawAudioFormat( 2874 OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) { 2875 OMX_PARAM_PORTDEFINITIONTYPE def; 2876 InitOMXParams(&def); 2877 def.nPortIndex = portIndex; 2878 2879 status_t err = mOMXNode->getParameter( 2880 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2881 2882 if (err != OK) { 2883 return err; 2884 } 2885 2886 def.format.audio.eEncoding = OMX_AUDIO_CodingPCM; 2887 2888 err = mOMXNode->setParameter( 2889 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2890 2891 if (err != OK) { 2892 return err; 2893 } 2894 2895 OMX_AUDIO_PARAM_PCMMODETYPE pcmParams; 2896 InitOMXParams(&pcmParams); 2897 pcmParams.nPortIndex = portIndex; 2898 2899 err = mOMXNode->getParameter( 2900 OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2901 2902 if (err != OK) { 2903 return err; 2904 } 2905 2906 pcmParams.nChannels = numChannels; 2907 switch (encoding) { 2908 case kAudioEncodingPcm8bit: 2909 pcmParams.eNumData = OMX_NumericalDataUnsigned; 2910 pcmParams.nBitPerSample = 8; 2911 break; 2912 case kAudioEncodingPcmFloat: 2913 pcmParams.eNumData = OMX_NumericalDataFloat; 2914 pcmParams.nBitPerSample = 32; 2915 break; 2916 case kAudioEncodingPcm16bit: 2917 pcmParams.eNumData = OMX_NumericalDataSigned; 2918 pcmParams.nBitPerSample = 16; 2919 break; 2920 default: 2921 return BAD_VALUE; 2922 } 2923 pcmParams.bInterleaved = OMX_TRUE; 2924 pcmParams.nSamplingRate = sampleRate; 2925 pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear; 2926 2927 if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) { 2928 return OMX_ErrorNone; 2929 } 2930 2931 err = mOMXNode->setParameter( 2932 OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2933 // if we could not set up raw format to non-16-bit, try with 16-bit 2934 // NOTE: we will also verify this via readback, in case codec ignores these fields 2935 if (err != OK && 
encoding != kAudioEncodingPcm16bit) { 2936 pcmParams.eNumData = OMX_NumericalDataSigned; 2937 pcmParams.nBitPerSample = 16; 2938 err = mOMXNode->setParameter( 2939 OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams)); 2940 } 2941 return err; 2942} 2943 2944status_t ACodec::configureTunneledVideoPlayback( 2945 int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) { 2946 native_handle_t* sidebandHandle; 2947 2948 status_t err = mOMXNode->configureVideoTunnelMode( 2949 kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle); 2950 if (err != OK) { 2951 ALOGE("configureVideoTunnelMode failed! (err %d).", err); 2952 return err; 2953 } 2954 2955 err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle); 2956 if (err != OK) { 2957 ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).", 2958 sidebandHandle, err); 2959 return err; 2960 } 2961 2962 return OK; 2963} 2964 2965status_t ACodec::setVideoPortFormatType( 2966 OMX_U32 portIndex, 2967 OMX_VIDEO_CODINGTYPE compressionFormat, 2968 OMX_COLOR_FORMATTYPE colorFormat, 2969 bool usingNativeBuffers) { 2970 OMX_VIDEO_PARAM_PORTFORMATTYPE format; 2971 InitOMXParams(&format); 2972 format.nPortIndex = portIndex; 2973 format.nIndex = 0; 2974 bool found = false; 2975 2976 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2977 format.nIndex = index; 2978 status_t err = mOMXNode->getParameter( 2979 OMX_IndexParamVideoPortFormat, 2980 &format, sizeof(format)); 2981 2982 if (err != OK) { 2983 return err; 2984 } 2985 2986 // substitute back flexible color format to codec supported format 2987 OMX_U32 flexibleEquivalent; 2988 if (compressionFormat == OMX_VIDEO_CodingUnused 2989 && IsFlexibleColorFormat( 2990 mOMXNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent) 2991 && colorFormat == flexibleEquivalent) { 2992 ALOGI("[%s] using color format %#x in place of %#x", 2993 mComponentName.c_str(), format.eColorFormat, colorFormat); 2994 colorFormat = format.eColorFormat; 2995 
// Selects a video format on |portIndex| by enumerating the component's
// supported port formats (OMX_IndexParamVideoPortFormat) until an entry
// matches the requested compression/color format pair, then commits that
// entry with setParameter. Enumeration is capped at kMaxIndicesToCheck.
// Returns UNKNOWN_ERROR if no supported entry matches, or the first
// getParameter error otherwise.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMXNode->getParameter(
                OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            // end of the component's format list (or a real error)
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && IsFlexibleColorFormat(
                        mOMXNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        // Workaround for the TI encoder: only one of the two fields in each
        // enumerated entry is trustworthy, depending on the port direction.
        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
            && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            // give up on pathological components with endless format lists
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    // commit the matching entry on the port
    status_t err = mOMXNode->setParameter(
            OMX_IndexParamVideoPortFormat, &format, sizeof(format));

    return err;
}
// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
//
// Suggested preference order:
// - optimal format for texture rendering (mediaplayer behavior)
// - optimal SW readable & texture renderable format (flex-YUV support)
// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
// - legacy "usable" standard formats
//
// For legacy support, we prefer a standard format, but will settle for a SW readable
// flex-YUV format.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMXNode->getParameter(
                OMX_IndexParamVideoPortFormat, &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                memcpy(&format, &legacyFormat, sizeof(format));
                break;
            }
            return err;
        }
        // decoder output ports must enumerate raw (uncompressed) formats only
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            return OMX_ErrorBadParameter;
        }
        // non-legacy mode: take the component's first (most preferred) format
        if (!getLegacyFlexibleFormat) {
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && IsFlexibleColorFormat(
                        mOMXNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMXNode->setParameter(
            OMX_IndexParamVideoPortFormat, &format, sizeof(format));
}
format.eColorFormat, false /* usingNativeBuffers */, 3093 &flexibleEquivalent) 3094 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 3095 memcpy(&legacyFormat, &format, sizeof(format)); 3096 } 3097 } 3098 return mOMXNode->setParameter( 3099 OMX_IndexParamVideoPortFormat, &format, sizeof(format)); 3100} 3101 3102static const struct VideoCodingMapEntry { 3103 const char *mMime; 3104 OMX_VIDEO_CODINGTYPE mVideoCodingType; 3105} kVideoCodingMapEntry[] = { 3106 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 3107 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 3108 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 3109 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 3110 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 3111 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 3112 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 3113 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 3114}; 3115 3116static status_t GetVideoCodingTypeFromMime( 3117 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 3118 for (size_t i = 0; 3119 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3120 ++i) { 3121 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3122 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3123 return OK; 3124 } 3125 } 3126 3127 *codingType = OMX_VIDEO_CodingUnused; 3128 3129 return ERROR_UNSUPPORTED; 3130} 3131 3132static status_t GetMimeTypeForVideoCoding( 3133 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3134 for (size_t i = 0; 3135 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3136 ++i) { 3137 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3138 *mime = kVideoCodingMapEntry[i].mMime; 3139 return OK; 3140 } 3141 } 3142 3143 mime->clear(); 3144 3145 return ERROR_UNSUPPORTED; 3146} 3147 3148status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) { 3149 OMX_PARAM_PORTDEFINITIONTYPE def; 3150 InitOMXParams(&def); 3151 
def.nPortIndex = portIndex; 3152 status_t err; 3153 ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(), 3154 portIndex == kPortIndexInput ? "input" : "output", bufferNum); 3155 err = mOMXNode->getParameter( 3156 OMX_IndexParamPortDefinition, &def, sizeof(def)); 3157 if (err != OK) { 3158 return err; 3159 } 3160 def.nBufferCountActual = bufferNum; 3161 err = mOMXNode->setParameter( 3162 OMX_IndexParamPortDefinition, &def, sizeof(def)); 3163 if (err != OK) { 3164 // Component could reject this request. 3165 ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(), 3166 portIndex == kPortIndexInput ? "input" : "output", bufferNum); 3167 } 3168 return OK; 3169} 3170 3171status_t ACodec::setupVideoDecoder( 3172 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow, 3173 bool usingSwRenderer, sp<AMessage> &outputFormat) { 3174 int32_t width, height; 3175 if (!msg->findInt32("width", &width) 3176 || !msg->findInt32("height", &height)) { 3177 return INVALID_OPERATION; 3178 } 3179 3180 OMX_VIDEO_CODINGTYPE compressionFormat; 3181 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3182 3183 if (err != OK) { 3184 return err; 3185 } 3186 3187 if (compressionFormat == OMX_VIDEO_CodingVP9) { 3188 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 3189 InitOMXParams(¶ms); 3190 params.nPortIndex = kPortIndexInput; 3191 // Check if VP9 decoder advertises supported profiles. 
3192 params.nProfileIndex = 0; 3193 status_t err = mOMXNode->getParameter( 3194 OMX_IndexParamVideoProfileLevelQuerySupported, 3195 ¶ms, sizeof(params)); 3196 mIsLegacyVP9Decoder = err != OK; 3197 } 3198 3199 err = setVideoPortFormatType( 3200 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 3201 3202 if (err != OK) { 3203 return err; 3204 } 3205 3206 int32_t tmp; 3207 if (msg->findInt32("color-format", &tmp)) { 3208 OMX_COLOR_FORMATTYPE colorFormat = 3209 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3210 err = setVideoPortFormatType( 3211 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3212 if (err != OK) { 3213 ALOGW("[%s] does not support color format %d", 3214 mComponentName.c_str(), colorFormat); 3215 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3216 } 3217 } else { 3218 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3219 } 3220 3221 if (err != OK) { 3222 return err; 3223 } 3224 3225 // Set the component input buffer number to be |tmp|. If succeed, 3226 // component will set input port buffer number to be |tmp|. If fail, 3227 // component will keep the same buffer number as before. 3228 if (msg->findInt32("android._num-input-buffers", &tmp)) { 3229 err = setPortBufferNum(kPortIndexInput, tmp); 3230 if (err != OK) 3231 return err; 3232 } 3233 3234 // Set the component output buffer number to be |tmp|. If succeed, 3235 // component will set output port buffer number to be |tmp|. If fail, 3236 // component will keep the same buffer number as before. 
// Resolves and caches the vendor extension index used to describe color
// aspects. On failure the cached index is cleared to 0 so later calls fall
// back to ERROR_UNSUPPORTED.
status_t ACodec::initDescribeColorAspectsIndex() {
    status_t err = mOMXNode->getExtensionIndex(
            "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
    if (err != OK) {
        mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes the color aspects in |params| to the codec via the cached extension
// index. When |verify| is set and the set succeeded, reads the aspects back
// (the codec may have adjusted them in place). Returns ERROR_UNSUPPORTED if
// the extension index was never resolved.
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMXNode->setConfig(mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));

    if (verify && err == OK) {
        err = getCodecColorAspects(params);
    }

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
            "[%s] setting color aspects failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}
params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3302 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3303 err, asString(err)); 3304 3305 if (verify && err == OK) { 3306 err = getCodecColorAspects(params); 3307 } 3308 3309 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3310 "[%s] setting color aspects failed even though codec advertises support", 3311 mComponentName.c_str()); 3312 return err; 3313} 3314 3315status_t ACodec::setColorAspectsForVideoDecoder( 3316 int32_t width, int32_t height, bool usingNativeWindow, 3317 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3318 DescribeColorAspectsParams params; 3319 InitOMXParams(¶ms); 3320 params.nPortIndex = kPortIndexOutput; 3321 3322 getColorAspectsFromFormat(configFormat, params.sAspects); 3323 if (usingNativeWindow) { 3324 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3325 // The default aspects will be set back to the output format during the 3326 // getFormat phase of configure(). Set non-Unspecified values back into the 3327 // format, in case component does not support this enumeration. 
// Reads the codec's current color aspects (and, if requested via
// bRequestingDataSpace, its dataspace suggestion) through the cached
// extension index. Returns ERROR_UNSUPPORTED when the index was never
// resolved.
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeColorAspectsIndex) {
        err = mOMXNode->getConfig(mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    // dataspace queries and dataspace-changed notifications are allowed to be
    // unsupported; only warn for plain color-aspect reads
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}

// Copies the encoder's input-port color aspects into |format|, but only when
// the codec actually supports the color-aspect extension.
status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    status_t err = getCodecColorAspects(params);
    if (err == OK) {
        // we only set encoder input aspects if codec supports them
        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
    }
    return err;
}
// Determines the dataspace for the aspects in |params|: optionally asks the
// codec first (|tryCodec|), and otherwise/on failure derives it from the
// color aspects via getDataSpaceForColorAspects. *dataSpace is always set.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
          "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}


// Writes the configured aspects to the decoder, records the (possibly
// codec-adjusted) result into |outputFormat|, and — when |dataSpace| is
// non-NULL — also resolves the dataspace to use for rendering.
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}
// initial video encoder setup for bytebuffer mode
//
// Seeds encoder input-port color aspects from the configure() format and
// negotiates them with the codec. For recorder use, also determines the
// input dataspace. The configured aspects are propagated to |outputFormat|
// since they are not read back from the encoder.
status_t ACodec::setColorAspectsForVideoEncoder(
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    // copy config to output format as this is not exposed via getFormat
    copyColorConfig(configFormat, outputFormat);

    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    getColorAspectsFromFormat(configFormat, params.sAspects);

    (void)initDescribeColorAspectsIndex();

    int32_t usingRecorder;
    if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
        android_dataspace dataSpace = HAL_DATASPACE_BT709;
        int32_t width, height;
        if (configFormat->findInt32("width", &width)
                && configFormat->findInt32("height", &height)) {
            setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
            status_t err = getDataSpace(
                    params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
            if (err != OK) {
                return err;
            }
            setColorAspectsIntoFormat(params.sAspects, outputFormat);
        }
        // tell the recorder path which dataspace to use for input buffers
        inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
    }

    // communicate color aspects to codec, but do not allow change of the platform aspects
    ColorAspects origAspects = params.sAspects;
    for (int triesLeft = 2; --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        // stop on error, or once the codec accepted the aspects unchanged
        if (err != OK
                || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
                        params.sAspects, origAspects, true /* usePlatformAspects */)) {
            return err;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }
    return OK;
}
DescribeHDRStaticInfoParams params; 3481 InitOMXParams(¶ms); 3482 params.nPortIndex = portIndex; 3483 3484 HDRStaticInfo *info = ¶ms.sInfo; 3485 if (getHDRStaticInfoFromFormat(configFormat, info)) { 3486 setHDRStaticInfoIntoFormat(params.sInfo, outputFormat); 3487 } 3488 3489 (void)initDescribeHDRStaticInfoIndex(); 3490 3491 // communicate HDR static Info to codec 3492 return setHDRStaticInfo(params); 3493} 3494 3495// subsequent initial video encoder setup for surface mode 3496status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3497 android_dataspace *dataSpace /* nonnull */) { 3498 DescribeColorAspectsParams params; 3499 InitOMXParams(¶ms); 3500 params.nPortIndex = kPortIndexInput; 3501 ColorAspects &aspects = params.sAspects; 3502 3503 // reset default format and store resulting format into both input and output formats 3504 getColorAspectsFromFormat(mConfigFormat, aspects); 3505 int32_t width, height; 3506 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3507 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3508 } 3509 setColorAspectsIntoFormat(aspects, mInputFormat); 3510 setColorAspectsIntoFormat(aspects, mOutputFormat); 3511 3512 // communicate color aspects to codec, but do not allow any change 3513 ColorAspects origAspects = aspects; 3514 status_t err = OK; 3515 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3516 status_t err = setCodecColorAspects(params, true /* readBack */); 3517 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3518 break; 3519 } 3520 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3521 mComponentName.c_str()); 3522 } 3523 3524 *dataSpace = HAL_DATASPACE_BT709; 3525 aspects = origAspects; // restore desired color aspects 3526 status_t res = getDataSpace( 3527 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3528 if (err == 
// Reads HDR static info for the given port and, when the codec supports the
// extension, copies it into |format|.
status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    status_t err = getHDRStaticInfo(params);
    if (err == OK) {
        // we only set decoder output HDRStaticInfo if codec supports them
        setHDRStaticInfoIntoFormat(params.sInfo, format);
    }
    return err;
}

// Resolves and caches the vendor extension index for HDR static info; the
// cached index is cleared to 0 on failure.
status_t ACodec::initDescribeHDRStaticInfoIndex() {
    status_t err = mOMXNode->getExtensionIndex(
            "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
    if (err != OK) {
        mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
    }
    return err;
}

// Pushes HDR static info to the codec via the cached extension index.
// Returns ERROR_UNSUPPORTED when the index was never resolved.
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMXNode->setConfig(mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    const HDRStaticInfo *info = &params.sInfo;
    ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, "
            "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
            mComponentName.c_str(),
            info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y,
            info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
            info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
            info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);

    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] setting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}
info->sType1.mG.y, 3581 info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y, 3582 info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance, 3583 info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel); 3584 3585 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3586 "[%s] setting HDRStaticInfo failed even though codec advertises support", 3587 mComponentName.c_str()); 3588 return err; 3589} 3590 3591status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams ¶ms) { 3592 status_t err = ERROR_UNSUPPORTED; 3593 if (mDescribeHDRStaticInfoIndex) { 3594 err = mOMXNode->getConfig(mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3595 } 3596 3597 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3598 "[%s] getting HDRStaticInfo failed even though codec advertises support", 3599 mComponentName.c_str()); 3600 return err; 3601} 3602 3603status_t ACodec::setupVideoEncoder( 3604 const char *mime, const sp<AMessage> &msg, 3605 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3606 int32_t tmp; 3607 if (!msg->findInt32("color-format", &tmp)) { 3608 return INVALID_OPERATION; 3609 } 3610 3611 OMX_COLOR_FORMATTYPE colorFormat = 3612 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3613 3614 status_t err = setVideoPortFormatType( 3615 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3616 3617 if (err != OK) { 3618 ALOGE("[%s] does not support color format %d", 3619 mComponentName.c_str(), colorFormat); 3620 3621 return err; 3622 } 3623 3624 /* Input port configuration */ 3625 3626 OMX_PARAM_PORTDEFINITIONTYPE def; 3627 InitOMXParams(&def); 3628 3629 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3630 3631 def.nPortIndex = kPortIndexInput; 3632 3633 err = mOMXNode->getParameter( 3634 OMX_IndexParamPortDefinition, &def, sizeof(def)); 3635 3636 if (err != OK) { 3637 return err; 3638 } 3639 3640 int32_t width, height, bitrate; 3641 if (!msg->findInt32("width", &width) 3642 || 
// Configures a video encoder from the configure() message, in strict order:
// input color format, input port definition (size/stride/frame rate),
// output coding type and port definition (size/bitrate), optional intra
// refresh, codec-specific parameters, color aspects, HDR static info and
// temporal layering. Color-aspect/HDR support are optional.
status_t ACodec::setupVideoEncoder(
        const char *mime, const sp<AMessage> &msg,
        sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    int32_t tmp;
    if (!msg->findInt32("color-format", &tmp)) {
        return INVALID_OPERATION;
    }

    OMX_COLOR_FORMATTYPE colorFormat =
        static_cast<OMX_COLOR_FORMATTYPE>(tmp);

    status_t err = setVideoPortFormatType(
            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);

    if (err != OK) {
        ALOGE("[%s] does not support color format %d",
              mComponentName.c_str(), colorFormat);

        return err;
    }

    /* Input port configuration */

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    def.nPortIndex = kPortIndexInput;

    err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    int32_t width, height, bitrate;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)
            || !msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    // stride/slice-height default to width/height when not specified
    int32_t stride;
    if (!msg->findInt32("stride", &stride)) {
        stride = width;
    }

    video_def->nStride = stride;

    int32_t sliceHeight;
    if (!msg->findInt32("slice-height", &sliceHeight)) {
        sliceHeight = height;
    }

    video_def->nSliceHeight = sliceHeight;

    // YUV 4:2:0 frame size: 1.5 bytes per pixel of the padded frame
    def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
        // NOTE(review): mTimePerFrameUs is only updated when frame-rate was
        // supplied as an integer, not in the float path above — confirm this
        // asymmetry is intentional.
        mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
    }

    // xFramerate is in Q16 fixed point
    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
    // this is redundant as it was already set up in setVideoPortFormatType
    // FIXME for now skip this only for flexible YUV formats
    if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
        video_def->eColorFormat = colorFormat;
    }

    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set input port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    /* Output port configuration */

    OMX_VIDEO_CODINGTYPE compressionFormat;
    err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        ALOGE("[%s] does not support compression format %d",
             mComponentName.c_str(), compressionFormat);

        return err;
    }

    // reuse |def|/|video_def| for the output port
    def.nPortIndex = kPortIndexOutput;

    err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;
    video_def->xFramerate = 0;
    video_def->nBitrate = bitrate;
    video_def->eCompressionFormat = compressionFormat;
    video_def->eColorFormat = OMX_COLOR_FormatUnused;

    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set output port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    int32_t intraRefreshPeriod = 0;
    if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod >= 0) {
        err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    // codec-specific encoder parameters
    switch (compressionFormat) {
        case OMX_VIDEO_CodingMPEG4:
            err = setupMPEG4EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingH263:
            err = setupH263EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingAVC:
            err = setupAVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingHEVC:
            err = setupHEVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            err = setupVPXEncoderParameters(msg, outputFormat);
            break;

        default:
            break;
    }

    if (err != OK) {
        return err;
    }

    // Set up color aspects on input, but propagate them to the output format, as they will
    // not be read back from encoder.
    err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
    if (err == ERROR_UNSUPPORTED) {
        ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    switch (compressionFormat) {
        case OMX_VIDEO_CodingAVC:
        case OMX_VIDEO_CodingHEVC:
            err = configureTemporalLayers(msg, true /* inConfigure */, outputFormat);
            if (err != OK) {
                err = OK; // ignore failure
            }
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            // TODO: do we need to support android.generic layering? webrtc layering is
            // already set up in setupVPXEncoderParameters.
            break;

        default:
            break;
    }

    if (err == OK) {
        ALOGI("setupVideoEncoder succeeded");
    }

    return err;
}
3815 break; 3816 3817 default: 3818 break; 3819 } 3820 3821 if (err == OK) { 3822 ALOGI("setupVideoEncoder succeeded"); 3823 } 3824 3825 return err; 3826} 3827 3828status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3829 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3830 InitOMXParams(¶ms); 3831 params.nPortIndex = kPortIndexOutput; 3832 3833 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3834 3835 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3836 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3837 int32_t mbs; 3838 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3839 return INVALID_OPERATION; 3840 } 3841 params.nCirMBs = mbs; 3842 } 3843 3844 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3845 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3846 int32_t mbs; 3847 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3848 return INVALID_OPERATION; 3849 } 3850 params.nAirMBs = mbs; 3851 3852 int32_t ref; 3853 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3854 return INVALID_OPERATION; 3855 } 3856 params.nAirRef = ref; 3857 } 3858 3859 status_t err = mOMXNode->setParameter( 3860 OMX_IndexParamVideoIntraRefresh, ¶ms, sizeof(params)); 3861 return err; 3862} 3863 3864static OMX_U32 setPFramesSpacing( 3865 float iFramesInterval /* seconds */, int32_t frameRate, uint32_t BFramesSpacing = 0) { 3866 // BFramesSpacing is the number of B frames between I/P frames 3867 // PFramesSpacing (the value to be returned) is the number of P frames between I frames 3868 // 3869 // keyFrameInterval = ((PFramesSpacing + 1) * BFramesSpacing) + PFramesSpacing + 1 3870 // ^^^ ^^^ ^^^ 3871 // number of B frames number of P I frame 3872 // 3873 // = (PFramesSpacing + 1) * (BFramesSpacing + 1) 3874 // 3875 // E.g. 
3876 // I P I : I-interval: 8, nPFrames 1, nBFrames 3 3877 // BBB BBB 3878 3879 if (iFramesInterval < 0) { // just 1 key frame 3880 return 0xFFFFFFFE; // don't use maxint as key-frame-interval calculation will add 1 3881 } else if (iFramesInterval == 0) { // just key frames 3882 return 0; 3883 } 3884 3885 // round down as key-frame-interval is an upper limit 3886 uint32_t keyFrameInterval = uint32_t(frameRate * iFramesInterval); 3887 OMX_U32 ret = keyFrameInterval / (BFramesSpacing + 1); 3888 return ret > 0 ? ret - 1 : 0; 3889} 3890 3891static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 3892 int32_t tmp; 3893 if (!msg->findInt32("bitrate-mode", &tmp)) { 3894 return OMX_Video_ControlRateVariable; 3895 } 3896 3897 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 3898} 3899 3900status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 3901 int32_t bitrate; 3902 float iFrameInterval; 3903 if (!msg->findInt32("bitrate", &bitrate) 3904 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) { 3905 return INVALID_OPERATION; 3906 } 3907 3908 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3909 3910 float frameRate; 3911 if (!msg->findFloat("frame-rate", &frameRate)) { 3912 int32_t tmp; 3913 if (!msg->findInt32("frame-rate", &tmp)) { 3914 return INVALID_OPERATION; 3915 } 3916 frameRate = (float)tmp; 3917 } 3918 3919 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 3920 InitOMXParams(&mpeg4type); 3921 mpeg4type.nPortIndex = kPortIndexOutput; 3922 3923 status_t err = mOMXNode->getParameter( 3924 OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3925 3926 if (err != OK) { 3927 return err; 3928 } 3929 3930 mpeg4type.nSliceHeaderSpacing = 0; 3931 mpeg4type.bSVH = OMX_FALSE; 3932 mpeg4type.bGov = OMX_FALSE; 3933 3934 mpeg4type.nAllowedPictureTypes = 3935 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3936 3937 mpeg4type.nBFrames = 0; 3938 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, mpeg4type.nBFrames); 
    // nPFrames == 0 means every frame is a key frame — restrict to I frames only.
    if (mpeg4type.nPFrames == 0) {
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    // Profile and level are optional, but if a profile is given a level must
    // accompany it, and the pair must be advertised by the component.
    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMXNode->setParameter(
            OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// Configures the H.263 video encoder from the configure() message; mirrors
// setupMPEG4EncoderParameters (bitrate, I-frame interval, optional
// profile/level, error correction).
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either a float or an int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nBFrames = 0;
    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h263type.nBFrames);
    // nPFrames == 0 means every frame is a key frame — restrict to I frames only.
    if (h263type.nPFrames == 0) {
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMXNode->setParameter(
            OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}

// static
// Picks the minimum AVC level whose throughput/size/bitrate limits cover the
// requested stream. Returns 0 (no OMX level constant) when nothing fits.
int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
        int width, int height, int rate, int bitrate,
        OMX_VIDEO_AVCPROFILETYPE profile) {
    // convert bitrate to main/baseline profile kbps equivalent
    switch (profile) {
        case OMX_VIDEO_AVCProfileHigh10:
            bitrate = divUp(bitrate, 3000); break;
        case OMX_VIDEO_AVCProfileHigh:
            bitrate = divUp(bitrate, 1250); break;
        default:
            bitrate = divUp(bitrate, 1000); break;
    }

    // convert size and rate to MBs
    width = divUp(width, 16);
    height = divUp(height, 16);
    int mbs = width * height;
    rate *= mbs;
    int maxDimension = max(width, height);

    static const int limits[][5] = {
        /*    MBps      MB   dim  bitrate        level */
        {
              1485,    99,   28,     64, OMX_VIDEO_AVCLevel1  },
        {     1485,    99,   28,    128, OMX_VIDEO_AVCLevel1b },
        {     3000,   396,   56,    192, OMX_VIDEO_AVCLevel11 },
        {     6000,   396,   56,    384, OMX_VIDEO_AVCLevel12 },
        {    11880,   396,   56,    768, OMX_VIDEO_AVCLevel13 },
        {    11880,   396,   56,   2000, OMX_VIDEO_AVCLevel2  },
        {    19800,   792,   79,   4000, OMX_VIDEO_AVCLevel21 },
        {    20250,  1620,  113,   4000, OMX_VIDEO_AVCLevel22 },
        {    40500,  1620,  113,  10000, OMX_VIDEO_AVCLevel3  },
        {   108000,  3600,  169,  14000, OMX_VIDEO_AVCLevel31 },
        {   216000,  5120,  202,  20000, OMX_VIDEO_AVCLevel32 },
        {   245760,  8192,  256,  20000, OMX_VIDEO_AVCLevel4  },
        {   245760,  8192,  256,  50000, OMX_VIDEO_AVCLevel41 },
        {   522240,  8704,  263,  50000, OMX_VIDEO_AVCLevel42 },
        {   589824, 22080,  420, 135000, OMX_VIDEO_AVCLevel5  },
        {   983040, 36864,  543, 240000, OMX_VIDEO_AVCLevel51 },
        {  2073600, 36864,  543, 240000, OMX_VIDEO_AVCLevel52 },
    };

    // The table is ordered from lowest to highest level; return the first
    // level satisfying all four limits.
    for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
        const int (&limit)[5] = limits[i];
        if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2]
                && bitrate <= limit[3]) {
            return limit[4];
        }
    }
    return 0;
}

// Configures the AVC (H.264) encoder from the configure() message: bitrate,
// I-frame interval, optional intra-refresh, profile/level and (when requested
// and supported) temporal layering.
status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either a float or an int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    status_t err = OK;
    int32_t intraRefreshMode = 0;
    if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) {
        err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode);
        if (err != OK) {
ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4136 err, intraRefreshMode); 4137 return err; 4138 } 4139 } 4140 4141 OMX_VIDEO_PARAM_AVCTYPE h264type; 4142 InitOMXParams(&h264type); 4143 h264type.nPortIndex = kPortIndexOutput; 4144 4145 err = mOMXNode->getParameter( 4146 OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4147 4148 if (err != OK) { 4149 return err; 4150 } 4151 4152 h264type.nAllowedPictureTypes = 4153 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4154 4155 int32_t profile; 4156 if (msg->findInt32("profile", &profile)) { 4157 int32_t level; 4158 if (!msg->findInt32("level", &level)) { 4159 return INVALID_OPERATION; 4160 } 4161 4162 err = verifySupportForProfileAndLevel(profile, level); 4163 4164 if (err != OK) { 4165 return err; 4166 } 4167 4168 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4169 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4170 } else { 4171 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4172#if 0 /* DON'T YET DEFAULT TO HIGHEST PROFILE */ 4173 // Use largest supported profile for AVC recording if profile is not specified. 
        for (OMX_VIDEO_AVCPROFILETYPE profile : {
                OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCProfileMain }) {
            if (verifySupportForProfileAndLevel(profile, 0) == OK) {
                h264type.eProfile = profile;
                break;
            }
        }
#endif
    }

    ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]",
            asString(h264type.eProfile), asString(h264type.eLevel));

    // Baseline: no B frames, no CABAC; Main/High: 1 B frame, CABAC and the
    // stronger prediction tools enabled.
    if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) {
        h264type.nSliceHeaderSpacing = 0;
        h264type.bUseHadamard = OMX_TRUE;
        h264type.nRefFrames = 1;
        h264type.nBFrames = 0;
        h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
        if (h264type.nPFrames == 0) {
            // every frame is a key frame — I frames only
            h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
        }
        h264type.nRefIdx10ActiveMinus1 = 0;
        h264type.nRefIdx11ActiveMinus1 = 0;
        h264type.bEntropyCodingCABAC = OMX_FALSE;
        h264type.bWeightedPPrediction = OMX_FALSE;
        h264type.bconstIpred = OMX_FALSE;
        h264type.bDirect8x8Inference = OMX_FALSE;
        h264type.bDirectSpatialTemporal = OMX_FALSE;
        h264type.nCabacInitIdc = 0;
    } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain ||
            h264type.eProfile == OMX_VIDEO_AVCProfileHigh) {
        h264type.nSliceHeaderSpacing = 0;
        h264type.bUseHadamard = OMX_TRUE;
        h264type.nRefFrames = 2;
        h264type.nBFrames = 1;
        h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
        h264type.nAllowedPictureTypes =
            OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB;
        h264type.nRefIdx10ActiveMinus1 = 0;
        h264type.nRefIdx11ActiveMinus1 = 0;
        h264type.bEntropyCodingCABAC = OMX_TRUE;
        h264type.bWeightedPPrediction = OMX_TRUE;
        h264type.bconstIpred = OMX_TRUE;
        h264type.bDirect8x8Inference = OMX_TRUE;
        h264type.bDirectSpatialTemporal = OMX_TRUE;
        h264type.nCabacInitIdc = 1;
    }

    if (h264type.nBFrames != 0) {
        h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB;
    }

    h264type.bEnableUEP = OMX_FALSE;
    h264type.bEnableFMO = OMX_FALSE;
    h264type.bEnableASO = OMX_FALSE;
    h264type.bEnableRS = OMX_FALSE;
    h264type.bFrameMBsOnly = OMX_TRUE;
    h264type.bMBAFF = OMX_FALSE;
    h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;

    err = mOMXNode->setParameter(
            OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));

    if (err != OK) {
        return err;
    }

    // TRICKY: if we are enabling temporal layering as well, some codecs may not support layering
    // when B-frames are enabled. Detect this now so we can disable B frames if temporal layering
    // is preferred.
    AString tsSchema;
    int32_t preferBFrames = (int32_t)false;
    if (msg->findString("ts-schema", &tsSchema)
            && (!msg->findInt32("android._prefer-b-frames", &preferBFrames) || !preferBFrames)) {
        OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layering;
        InitOMXParams(&layering);
        layering.nPortIndex = kPortIndexOutput;
        // If layering is supported but no B layers are, re-apply the AVC
        // parameters with B frames disabled.
        if (mOMXNode->getParameter(
                (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
                &layering, sizeof(layering)) == OK
                && layering.eSupportedPatterns
                && layering.nBLayerCountMax == 0) {
            h264type.nBFrames = 0;
            h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
            h264type.nAllowedPictureTypes &= ~OMX_VIDEO_PictureTypeB;
            ALOGI("disabling B-frames");
            err = mOMXNode->setParameter(
                    OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));

            if (err != OK) {
                return err;
            }
        }
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Configures the HEVC (H.265) encoder from the configure() message: bitrate,
// key-frame interval and optional profile/level.
status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either a float or an int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_HEVCTYPE hevcType;
    InitOMXParams(&hevcType);
    hevcType.nPortIndex = kPortIndexOutput;

    status_t err = OK;
    err = mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
    if (err != OK) {
        return err;
    }

    // Profile and level are optional, but a given profile requires a level and
    // the pair must be advertised by the component.
    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);
        if (err != OK) {
            return err;
        }

        hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile);
        hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level);
    }
    // TODO: finer control?
    // setPFramesSpacing returns the P-frame count, so +1 yields frames per GOP.
    hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;

    err = mOMXNode->setParameter(
            (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
    if (err != OK) {
        return err;
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Configures the VP8/VP9 encoder: bitrate, optional key-frame interval, and
// optional temporal layering parsed from the "ts-schema" string. The
// advertised schema is written back into |outputFormat|.
status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg, sp<AMessage> &outputFormat) {
    int32_t bitrate;
    float iFrameInterval = 0;
    size_t tsLayers = 0;
    OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern =
        OMX_VIDEO_VPXTemporalLayerPatternNone;
    // Per-layer cumulative bitrate percentages, indexed by [layerCount-1][layer].
    static const uint32_t kVp8LayerRateAlloction
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]
            [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = {
        {100, 100, 100},  // 1 layer
        { 60, 100, 100},  // 2 layers {60%, 40%}
        { 40,  60, 100},  // 3 layers {40%, 20%, 40%}
    };
    if (!msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }
    // i-frame-interval is optional for VPX.
    msg->findAsFloat("i-frame-interval", &iFrameInterval);

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may arrive as either a float or an int32.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    AString tsSchema;
    OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE tsType =
        OMX_VIDEO_AndroidTemporalLayeringPatternNone;

    // Accept either "webrtc.vp8.N-layer" or "android.generic.N[+M]" schemas.
    if (msg->findString("ts-schema", &tsSchema)) {
        unsigned int numLayers = 0;
        unsigned int numBLayers = 0;
        int tags;
        char dummy;
        if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
                && numLayers > 0) {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            tsType = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
            tsLayers = numLayers;
        } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
                        &numLayers, &dummy, &numBLayers, &dummy))
                &&
                (tags == 1 || (tags == 3 && dummy == '+'))
                && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
            // VPX does not have a concept of B-frames, so just count all layers
            tsType = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
            tsLayers = numLayers + numBLayers;
        } else {
            ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
        }
        tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS);
    }

    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
    InitOMXParams(&vp8type);
    vp8type.nPortIndex = kPortIndexOutput;
    status_t err = mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
            &vp8type, sizeof(vp8type));

    // The extended VP8 parameter set is optional — failures below are logged
    // but do not abort configuration.
    if (err == OK) {
        if (iFrameInterval > 0) {
            vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;
        }
        vp8type.eTemporalPattern = pattern;
        vp8type.nTemporalLayerCount = tsLayers;
        if (tsLayers > 0) {
            for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) {
                vp8type.nTemporalLayerBitrateRatio[i] =
                    kVp8LayerRateAlloction[tsLayers - 1][i];
            }
        }
        if (bitrateMode == OMX_Video_ControlRateConstant) {
            vp8type.nMinQuantizer = 2;
            vp8type.nMaxQuantizer = 63;
        }

        err = mOMXNode->setParameter(
                (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                &vp8type, sizeof(vp8type));
        if (err != OK) {
            ALOGW("Extended VP8 parameters set failed: %d", err);
        } else if (tsType == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) {
            // advertise even single layer WebRTC layering, as it is defined
            outputFormat->setString("ts-schema", AStringPrintf("webrtc.vp8.%u-layer", tsLayers));
        } else if (tsLayers > 0) {
            // tsType == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid
            outputFormat->setString("ts-schema", AStringPrintf("android.generic.%u", tsLayers));
        }
    }

    return configureBitrate(bitrate, bitrateMode);
}

// Checks whether the component advertises the given profile at (at least) the
// given level, by enumerating OMX_IndexParamVideoProfileLevelQuerySupported.
// Returns OK on a match, ERROR_UNSUPPORTED after exhausting the enumeration,
// or the OMX error that terminated it.
status_t ACodec::verifySupportForProfileAndLevel(
        int32_t profile, int32_t level) {
    OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // kMaxIndicesToCheck bounds the enumeration so a misbehaving component
    // cannot keep us querying forever.
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        params.nProfileIndex = index;
        status_t err = mOMXNode->getParameter(
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params, sizeof(params));

        if (err != OK) {
            return err;
        }

        int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
        int32_t supportedLevel = static_cast<int32_t>(params.eLevel);

        if (profile == supportedProfile && level <= supportedLevel) {
            return OK;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                    mComponentName.c_str(), index,
                    params.eProfile, params.eLevel);
        }
    }
    return ERROR_UNSUPPORTED;
}

// Applies the target bitrate and rate-control mode to the output port.
status_t ACodec::configureBitrate(
        int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
    InitOMXParams(&bitrateType);
    bitrateType.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType));

    if (err != OK) {
        return err;
    }

    bitrateType.eControlRate = bitrateMode;
    bitrateType.nTargetBitrate = bitrate;

    return mOMXNode->setParameter(
            OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType));
}

// Enables resync-marker-based error resilience on the output port; all other
// error-correction tools are explicitly disabled. The whole feature is
// optional: if the component does not support the parameter we return OK.
status_t ACodec::setupErrorCorrectionParameters() {
    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
    InitOMXParams(&errorCorrectionType);
    errorCorrectionType.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType,
            sizeof(errorCorrectionType));

    if (err != OK) {
        return OK;  // Optional feature. Ignore this failure
    }

    errorCorrectionType.bEnableHEC = OMX_FALSE;
    errorCorrectionType.bEnableResync = OMX_TRUE;
    errorCorrectionType.nResynchMarkerSpacing = 256;
    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
    errorCorrectionType.bEnableRVLC = OMX_FALSE;

    return mOMXNode->setParameter(
            OMX_IndexParamVideoErrorCorrection,
            &errorCorrectionType, sizeof(errorCorrectionType));
}

// Writes frame size (and, for the input port, compression format and frame
// rate) into the port definition of |portIndex|. A negative |frameRate|
// leaves the port's frame rate untouched.
status_t ACodec::setVideoFormatOnPort(
        OMX_U32 portIndex,
        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
        float frameRate) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (portIndex == kPortIndexInput) {
        // XXX Need a (much) better heuristic to compute input buffer sizes.
        const size_t X = 64 * 1024;
        if (def.nBufferSize < X) {
            def.nBufferSize = X;
        }
    }

    if (def.eDomain != OMX_PortDomainVideo) {
        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
        return FAILED_TRANSACTION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    if (portIndex == kPortIndexInput) {
        video_def->eCompressionFormat = compressionFormat;
        video_def->eColorFormat = OMX_COLOR_FormatUnused;
        if (frameRate >= 0) {
            // xFramerate is in Q16 fixed point.
            video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
        }
    }

    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    return err;
}

// Tells the component whether output should use native (graphic) buffers,
// based on whether a native window is attached.
status_t ACodec::initNativeWindow() {
    if (mNativeWindow != NULL) {
        return mOMXNode->enableNativeBuffers(kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
    }

    mOMXNode->enableNativeBuffers(kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
    return OK;
}

// Counts buffers on |portIndex| currently held by the OMX component.
size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            ++n;
        }
    }

    return n;
}

// Counts output buffers currently queued to / held by the native window.
size_t ACodec::countBuffersOwnedByNativeWindow() const {
    size_t n = 0;

    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);

        if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ++n;
        }
    }

    return n;
}

// Dequeues buffers back from the native window until only the minimum number
// the window is allowed to retain (mNumUndequeuedBuffers) remains outstanding.
void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
    if (mNativeWindow == NULL) {
        return;
    }

    while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
            && dequeueBufferFromNativeWindow() != NULL) {
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}

// Returns true if every buffer on |portIndex| is held either by us or by the
// native window (i.e. none are with the component or upstream/downstream).
bool ACodec::allYourBuffersAreBelongToUs(
        OMX_U32 portIndex) {
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);

        if (info->mStatus != BufferInfo::OWNED_BY_US
                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("[%s] Buffer %u on port %u still has status %d",
                    mComponentName.c_str(),
                    info->mBufferID, portIndex, info->mStatus);
            return false;
        }
    }

    return true;
}

// Convenience overload: checks both input and output ports.
bool ACodec::allYourBuffersAreBelongToUs() {
    return allYourBuffersAreBelongToUs(kPortIndexInput)
        && allYourBuffersAreBelongToUs(kPortIndexOutput);
}

// Queues a message to be re-delivered once the current state can handle it.
void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}

// Re-delivers all deferred messages in FIFO order. The queue is swapped out
// first so that messages deferred again during processing are not re-run in
// this pass.
void ACodec::processDeferredMessages() {
    List<sp<AMessage> > queue = mDeferredQueue;
    mDeferredQueue.clear();

    List<sp<AMessage> >::iterator it = queue.begin();
    while (it != queue.end()) {
        onMessageReceived(*it++);
    }
}

// Queries the port definition of |portIndex| and translates it into
// MediaFormat-style keys on |notify| (mime, dimensions, crop, color/audio
// parameters, ...). Returns an error if the port direction, domain or codec
// configuration is unexpected.
status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
    const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMXNode->getParameter(OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }

    if (def.eDir != (portIndex == kPortIndexOutput ?
            OMX_DirOutput : OMX_DirInput)) {
        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
        return BAD_VALUE;
    }

    switch (def.eDomain) {
        case OMX_PortDomainVideo:
        {
            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
            switch ((int)videoDef->eCompressionFormat) {
                case OMX_VIDEO_CodingUnused:
                {
                    // Raw video: only valid on a decoder output / encoder input port.
                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);

                    notify->setInt32("stride", videoDef->nStride);
                    notify->setInt32("slice-height", videoDef->nSliceHeight);
                    notify->setInt32("color-format", videoDef->eColorFormat);

                    // Without a native window, describe the layout so clients
                    // can interpret the raw buffers directly.
                    if (mNativeWindow == NULL) {
                        DescribeColorFormat2Params describeParams;
                        InitOMXParams(&describeParams);
                        describeParams.eColorFormat = videoDef->eColorFormat;
                        describeParams.nFrameWidth = videoDef->nFrameWidth;
                        describeParams.nFrameHeight = videoDef->nFrameHeight;
                        describeParams.nStride = videoDef->nStride;
                        describeParams.nSliceHeight = videoDef->nSliceHeight;
                        describeParams.bUsingNativeBuffers = OMX_FALSE;

                        if (DescribeColorFormat(mOMXNode, describeParams)) {
                            notify->setBuffer(
                                    "image-data",
                                    ABuffer::CreateAsCopy(
                                            &describeParams.sMediaImage,
                                            sizeof(describeParams.sMediaImage)));

                            MediaImage2 &img = describeParams.sMediaImage;
                            MediaImage2::PlaneInfo *plane = img.mPlane;
                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
                                    mComponentName.c_str(), img.mWidth, img.mHeight,
                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
                        }
                    }

                    int32_t width = (int32_t)videoDef->nFrameWidth;
                    int32_t height = (int32_t)videoDef->nFrameHeight;

                    if (portIndex == kPortIndexOutput) {
                        OMX_CONFIG_RECTTYPE rect;
                        InitOMXParams(&rect);
                        rect.nPortIndex = portIndex;

                        // Fall back to the full frame when the component does
                        // not report a crop rectangle.
                        if (mOMXNode->getConfig(
                                (portIndex == kPortIndexOutput ?
                                        OMX_IndexConfigCommonOutputCrop :
                                        OMX_IndexConfigCommonInputCrop),
                                &rect, sizeof(rect)) != OK) {
                            rect.nLeft = 0;
                            rect.nTop = 0;
                            rect.nWidth = videoDef->nFrameWidth;
                            rect.nHeight = videoDef->nFrameHeight;
                        }

                        // Reject crop rectangles that fall outside the frame.
                        if (rect.nLeft < 0 ||
                                rect.nTop < 0 ||
                                rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
                                rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
                                    rect.nLeft, rect.nTop,
                                    rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
                            return BAD_VALUE;
                        }

                        // "crop" uses inclusive right/bottom coordinates.
                        notify->setRect(
                                "crop",
                                rect.nLeft,
                                rect.nTop,
                                rect.nLeft + rect.nWidth - 1,
                                rect.nTop + rect.nHeight - 1);

                        width = rect.nWidth;
                        height = rect.nHeight;

                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
                                width, height, mConfigFormat, notify,
                                mUsingNativeWindow ? &dataSpace : NULL);
                        if (mUsingNativeWindow) {
                            notify->setInt32("android._dataspace", dataSpace);
                        }
                        (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
                    } else {
                        (void)getInputColorAspectsForVideoEncoder(notify);
                        if (mConfigFormat->contains("hdr-static-info")) {
                            (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
                        }
                    }

                    break;
                }

                case OMX_VIDEO_CodingVP8:
                case OMX_VIDEO_CodingVP9:
                {
                    // For VP8/VP9 encoders, advertise the temporal layering
                    // schema actually configured on the component.
                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
                    InitOMXParams(&vp8type);
                    vp8type.nPortIndex = kPortIndexOutput;
                    status_t err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
                            &vp8type,
                            sizeof(vp8type));

                    if (err == OK) {
                        if (vp8type.eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC
                                && vp8type.nTemporalLayerCount > 0
                                && vp8type.nTemporalLayerCount
                                        <= OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) {
                            // advertise as android.generic if we configured for android.generic
                            AString origSchema;
                            if (notify->findString("ts-schema", &origSchema)
                                    && origSchema.startsWith("android.generic")) {
                                notify->setString("ts-schema", AStringPrintf(
                                        "android.generic.%u", vp8type.nTemporalLayerCount));
                            } else {
                                notify->setString("ts-schema", AStringPrintf(
                                        "webrtc.vp8.%u-layer", vp8type.nTemporalLayerCount));
                            }
                        }
                    }
                    // Fall through to set up mime.
                }

                default:
                {
                    // Compressed video (also reached by VP8/VP9 fall-through).
                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
                        // should be CodingUnused
                        ALOGE("Raw port video compression format is %s(%d)",
                                asString(videoDef->eCompressionFormat),
                                videoDef->eCompressionFormat);
                        return BAD_VALUE;
                    }
                    AString mime;
                    if (GetMimeTypeForVideoCoding(
                            videoDef->eCompressionFormat, &mime) != OK) {
                        notify->setString("mime", "application/octet-stream");
                    } else {
                        notify->setString("mime", mime.c_str());
                    }
                    uint32_t intraRefreshPeriod = 0;
                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
                            && intraRefreshPeriod > 0) {
                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
                    }
                    break;
                }
            }
            notify->setInt32("width", videoDef->nFrameWidth);
            notify->setInt32("height", videoDef->nFrameHeight);
            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
                    portIndex == kPortIndexInput ? "input" : "output",
                    notify->debugString().c_str());

            break;
        }

        case OMX_PortDomainAudio:
        {
            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;

            switch ((int)audioDef->eEncoding) {
                case OMX_AUDIO_CodingPCM:
                {
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // Only interleaved (or mono) linear PCM is supported here.
                    if (params.nChannels <= 0
                            || (params.nChannels != 1 && !params.bInterleaved)
                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
                        ALOGE("unsupported PCM port: %u channels%s, %u-bit",
                                params.nChannels,
                                params.bInterleaved ? " interleaved" : "",
                                params.nBitPerSample);
                        return FAILED_TRANSACTION;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSamplingRate);

                    // Map (eNumData, nBitPerSample) to a platform PCM encoding;
                    // anything but u8 / f32 / s16 is rejected.
                    AudioEncoding encoding = kAudioEncodingPcm16bit;
                    if (params.eNumData == OMX_NumericalDataUnsigned
                            && params.nBitPerSample == 8u) {
                        encoding = kAudioEncodingPcm8bit;
                    } else if (params.eNumData == OMX_NumericalDataFloat
                            && params.nBitPerSample == 32u) {
                        encoding = kAudioEncodingPcmFloat;
                    } else if (params.nBitPerSample != 16u
                            || params.eNumData != OMX_NumericalDataSigned) {
                        ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ",
                                asString(params.eNumData), params.eNumData,
                                asString(params.ePCMMode), params.ePCMMode);
                        return FAILED_TRANSACTION;
                    }
                    notify->setInt32("pcm-encoding", encoding);

                    if (mChannelMaskPresent) {
                        notify->setInt32("channel-mask", mChannelMask);
                    }
                    break;
                }

                case OMX_AUDIO_CodingAAC:
                {
                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioAac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAMR:
                {
                    OMX_AUDIO_PARAM_AMRTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioAmr, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    // AMR is always mono; band mode decides NB (8 kHz) vs WB (16 kHz).
                    notify->setInt32("channel-count", 1);
                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
                        notify->setInt32("sample-rate", 16000);
                    } else {
                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
                        notify->setInt32("sample-rate", 8000);
                    }
                    break;
                }

                case OMX_AUDIO_CodingFLAC:
                {
                    OMX_AUDIO_PARAM_FLACTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioFlac, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingMP3:
                {
                    OMX_AUDIO_PARAM_MP3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioMp3, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingVORBIS:
                {
                    OMX_AUDIO_PARAM_VORBISTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            OMX_IndexParamAudioVorbis, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidEAC3:
                {
                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingAndroidOPUS:
                {
                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus,
                            &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate", params.nSampleRate);
                    break;
                }

                case OMX_AUDIO_CodingG711:
                {
                    // G.711 is reported via the PCM parameter structure; the
                    // PCM mode distinguishes mu-law / A-law / linear.
                    OMX_AUDIO_PARAM_PCMMODETYPE params;
                    InitOMXParams(&params);
                    params.nPortIndex = portIndex;

                    err = mOMXNode->getParameter(
                            (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, &params, sizeof(params));
                    if (err != OK) {
                        return err;
                    }

                    const char *mime = NULL;
                    if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW;
                    } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) {
                        mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW;
                    } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear
                        mime = MEDIA_MIMETYPE_AUDIO_RAW;
                    }
                    notify->setString("mime", mime);
                    notify->setInt32("channel-count", params.nChannels);
                    notify->setInt32("sample-rate",
params.nSamplingRate); 5044 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5045 break; 5046 } 5047 5048 case OMX_AUDIO_CodingGSMFR: 5049 { 5050 OMX_AUDIO_PARAM_PCMMODETYPE params; 5051 InitOMXParams(¶ms); 5052 params.nPortIndex = portIndex; 5053 5054 err = mOMXNode->getParameter( 5055 OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5056 if (err != OK) { 5057 return err; 5058 } 5059 5060 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5061 notify->setInt32("channel-count", params.nChannels); 5062 notify->setInt32("sample-rate", params.nSamplingRate); 5063 break; 5064 } 5065 5066 default: 5067 ALOGE("Unsupported audio coding: %s(%d)\n", 5068 asString(audioDef->eEncoding), audioDef->eEncoding); 5069 return BAD_TYPE; 5070 } 5071 break; 5072 } 5073 5074 default: 5075 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5076 return BAD_TYPE; 5077 } 5078 5079 return OK; 5080} 5081 5082void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5083 // aspects are normally communicated in ColorAspects 5084 int32_t range, standard, transfer; 5085 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5086 5087 // if some aspects are unspecified, use dataspace fields 5088 if (range != 0) { 5089 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5090 } 5091 if (standard != 0) { 5092 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5093 } 5094 if (transfer != 0) { 5095 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5096 } 5097 5098 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5099 if (range != 0) { 5100 mOutputFormat->setInt32("color-range", range); 5101 } 5102 if (standard != 0) { 5103 mOutputFormat->setInt32("color-standard", standard); 5104 } 5105 if (transfer != 0) { 5106 mOutputFormat->setInt32("color-transfer", transfer); 5107 } 5108 5109 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5110 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5111 dataSpace, 5112 aspects.mRange, asString(aspects.mRange), 5113 aspects.mPrimaries, asString(aspects.mPrimaries), 5114 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5115 aspects.mTransfer, asString(aspects.mTransfer), 5116 range, asString((ColorRange)range), 5117 standard, asString((ColorStandard)standard), 5118 transfer, asString((ColorTransfer)transfer)); 5119} 5120 5121void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5122 // store new output format, at the same time mark that this is no longer the first frame 5123 mOutputFormat = mBaseOutputFormat->dup(); 5124 5125 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5126 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5127 return; 5128 } 5129 5130 if (expectedFormat != NULL) { 5131 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5132 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5133 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5134 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5135 mComponentName.c_str(), 5136 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5137 } 5138 } 5139 5140 if (!mIsVideo && !mIsEncoder) { 5141 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5142 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5143 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5144 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5145 5146 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5147 if (mConverter[kPortIndexOutput] != NULL) { 5148 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5149 } 5150 } 5151 5152 if (mTunneled) { 5153 sendFormatChange(); 5154 } 5155} 5156 5157void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> ¬ify) { 5158 AString mime; 5159 CHECK(mOutputFormat->findString("mime", &mime)); 5160 5161 if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) { 5162 // notify renderer of the crop change and dataspace change 5163 // NOTE: native window uses extended right-bottom coordinate 5164 int32_t left, top, right, bottom; 5165 if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) { 5166 notify->setRect("crop", left, top, right + 1, bottom + 1); 5167 } 5168 5169 int32_t dataSpace; 5170 if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) { 5171 notify->setInt32("dataspace", dataSpace); 5172 } 5173 } 5174} 5175 5176void ACodec::sendFormatChange() { 5177 AString mime; 5178 CHECK(mOutputFormat->findString("mime", &mime)); 5179 5180 if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) { 5181 int32_t channelCount, sampleRate; 5182 CHECK(mOutputFormat->findInt32("channel-count", &channelCount)); 5183 CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate)); 5184 if (mSampleRate != 0 && sampleRate != 0) { 5185 mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate; 5186 mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate; 5187 mSampleRate = sampleRate; 5188 } 5189 if (mSkipCutBuffer != NULL) { 5190 size_t prevbufsize = mSkipCutBuffer->size(); 5191 if (prevbufsize != 0) { 5192 ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize); 5193 } 5194 } 5195 mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount); 5196 } 5197 5198 sp<AMessage> notify = mNotify->dup(); 5199 notify->setInt32("what", kWhatOutputFormatChanged); 5200 notify->setMessage("format", mOutputFormat); 5201 notify->post(); 5202 5203 // mLastOutputFormat is not used when tunneled; doing this just to stay consistent 5204 mLastOutputFormat = mOutputFormat; 5205} 5206 5207void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) { 5208 
sp<AMessage> notify = mNotify->dup(); 5209 notify->setInt32("what", CodecBase::kWhatError); 5210 ALOGE("signalError(omxError %#x, internalError %d)", error, internalError); 5211 5212 if (internalError == UNKNOWN_ERROR) { // find better error code 5213 const status_t omxStatus = statusFromOMXError(error); 5214 if (omxStatus != 0) { 5215 internalError = omxStatus; 5216 } else { 5217 ALOGW("Invalid OMX error %#x", error); 5218 } 5219 } 5220 5221 mFatalError = true; 5222 5223 notify->setInt32("err", internalError); 5224 notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error. 5225 notify->post(); 5226} 5227 5228status_t ACodec::requestIDRFrame() { 5229 if (!mIsEncoder) { 5230 return ERROR_UNSUPPORTED; 5231 } 5232 5233 OMX_CONFIG_INTRAREFRESHVOPTYPE params; 5234 InitOMXParams(¶ms); 5235 5236 params.nPortIndex = kPortIndexOutput; 5237 params.IntraRefreshVOP = OMX_TRUE; 5238 5239 return mOMXNode->setConfig( 5240 OMX_IndexConfigVideoIntraVOPRefresh, 5241 ¶ms, 5242 sizeof(params)); 5243} 5244 5245//////////////////////////////////////////////////////////////////////////////// 5246 5247ACodec::PortDescription::PortDescription() { 5248} 5249 5250void ACodec::PortDescription::addBuffer( 5251 IOMX::buffer_id id, const sp<MediaCodecBuffer> &buffer, 5252 const sp<NativeHandle> &handle, const sp<RefBase> &memRef) { 5253 mBufferIDs.push_back(id); 5254 mBuffers.push_back(buffer); 5255 mHandles.push_back(handle); 5256 mMemRefs.push_back(memRef); 5257} 5258 5259size_t ACodec::PortDescription::countBuffers() { 5260 return mBufferIDs.size(); 5261} 5262 5263IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const { 5264 return mBufferIDs.itemAt(index); 5265} 5266 5267sp<MediaCodecBuffer> ACodec::PortDescription::bufferAt(size_t index) const { 5268 return mBuffers.itemAt(index); 5269} 5270 5271sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const { 5272 return mHandles.itemAt(index); 5273} 5274 5275sp<RefBase> 
ACodec::PortDescription::memRefAt(size_t index) const { 5276 return mMemRefs.itemAt(index); 5277} 5278 5279//////////////////////////////////////////////////////////////////////////////// 5280 5281ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState) 5282 : AState(parentState), 5283 mCodec(codec) { 5284} 5285 5286ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5287 OMX_U32 /* portIndex */) { 5288 return KEEP_BUFFERS; 5289} 5290 5291bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5292 switch (msg->what()) { 5293 case kWhatInputBufferFilled: 5294 { 5295 onInputBufferFilled(msg); 5296 break; 5297 } 5298 5299 case kWhatOutputBufferDrained: 5300 { 5301 onOutputBufferDrained(msg); 5302 break; 5303 } 5304 5305 case ACodec::kWhatOMXMessageList: 5306 { 5307 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5308 } 5309 5310 case ACodec::kWhatOMXMessageItem: 5311 { 5312 // no need to check as we already did it for kWhatOMXMessageList 5313 return onOMXMessage(msg); 5314 } 5315 5316 case ACodec::kWhatOMXMessage: 5317 { 5318 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5319 } 5320 5321 case ACodec::kWhatSetSurface: 5322 { 5323 sp<AReplyToken> replyID; 5324 CHECK(msg->senderAwaitsResponse(&replyID)); 5325 5326 sp<RefBase> obj; 5327 CHECK(msg->findObject("surface", &obj)); 5328 5329 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5330 5331 sp<AMessage> response = new AMessage; 5332 response->setInt32("err", err); 5333 response->postReply(replyID); 5334 break; 5335 } 5336 5337 case ACodec::kWhatCreateInputSurface: 5338 case ACodec::kWhatSetInputSurface: 5339 case ACodec::kWhatSignalEndOfInputStream: 5340 { 5341 // This may result in an app illegal state exception. 
5342 ALOGE("Message 0x%x was not handled", msg->what()); 5343 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5344 return true; 5345 } 5346 5347 case ACodec::kWhatOMXDied: 5348 { 5349 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 5350 ALOGE("OMX/mediaserver died, signalling error!"); 5351 mCodec->mGraphicBufferSource.clear(); 5352 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5353 break; 5354 } 5355 5356 case ACodec::kWhatReleaseCodecInstance: 5357 { 5358 ALOGI("[%s] forcing the release of codec", 5359 mCodec->mComponentName.c_str()); 5360 status_t err = mCodec->mOMXNode->freeNode(); 5361 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5362 mCodec->mComponentName.c_str(), err); 5363 sp<AMessage> notify = mCodec->mNotify->dup(); 5364 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5365 notify->post(); 5366 break; 5367 } 5368 5369 default: 5370 return false; 5371 } 5372 5373 return true; 5374} 5375 5376bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5377 // there is a possibility that this is an outstanding message for a 5378 // codec that we have already destroyed 5379 if (mCodec->mOMXNode == NULL) { 5380 ALOGI("ignoring message as already freed component: %s", 5381 msg->debugString().c_str()); 5382 return false; 5383 } 5384 5385 int32_t generation; 5386 CHECK(msg->findInt32("generation", (int32_t*)&generation)); 5387 if (generation != mCodec->mNodeGeneration) { 5388 ALOGW("Unexpected message for component: %s, gen %u, cur %u", 5389 msg->debugString().c_str(), generation, mCodec->mNodeGeneration); 5390 return false; 5391 } 5392 return true; 5393} 5394 5395bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5396 sp<RefBase> obj; 5397 CHECK(msg->findObject("messages", &obj)); 5398 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5399 5400 bool receivedRenderedEvents = false; 5401 for (std::list<sp<AMessage>>::const_iterator it = 
msgList->getList().cbegin(); 5402 it != msgList->getList().cend(); ++it) { 5403 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 5404 mCodec->handleMessage(*it); 5405 int32_t type; 5406 CHECK((*it)->findInt32("type", &type)); 5407 if (type == omx_message::FRAME_RENDERED) { 5408 receivedRenderedEvents = true; 5409 } 5410 } 5411 5412 if (receivedRenderedEvents) { 5413 // NOTE: all buffers are rendered in this case 5414 mCodec->notifyOfRenderedFrames(); 5415 } 5416 return true; 5417} 5418 5419bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) { 5420 int32_t type; 5421 CHECK(msg->findInt32("type", &type)); 5422 5423 switch (type) { 5424 case omx_message::EVENT: 5425 { 5426 int32_t event, data1, data2; 5427 CHECK(msg->findInt32("event", &event)); 5428 CHECK(msg->findInt32("data1", &data1)); 5429 CHECK(msg->findInt32("data2", &data2)); 5430 5431 if (event == OMX_EventCmdComplete 5432 && data1 == OMX_CommandFlush 5433 && data2 == (int32_t)OMX_ALL) { 5434 // Use of this notification is not consistent across 5435 // implementations. We'll drop this notification and rely 5436 // on flush-complete notifications on the individual port 5437 // indices instead. 
5438 5439 return true; 5440 } 5441 5442 return onOMXEvent( 5443 static_cast<OMX_EVENTTYPE>(event), 5444 static_cast<OMX_U32>(data1), 5445 static_cast<OMX_U32>(data2)); 5446 } 5447 5448 case omx_message::EMPTY_BUFFER_DONE: 5449 { 5450 IOMX::buffer_id bufferID; 5451 int32_t fenceFd; 5452 5453 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5454 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5455 5456 return onOMXEmptyBufferDone(bufferID, fenceFd); 5457 } 5458 5459 case omx_message::FILL_BUFFER_DONE: 5460 { 5461 IOMX::buffer_id bufferID; 5462 CHECK(msg->findInt32("buffer", (int32_t*)&bufferID)); 5463 5464 int32_t rangeOffset, rangeLength, flags, fenceFd; 5465 int64_t timeUs; 5466 5467 CHECK(msg->findInt32("range_offset", &rangeOffset)); 5468 CHECK(msg->findInt32("range_length", &rangeLength)); 5469 CHECK(msg->findInt32("flags", &flags)); 5470 CHECK(msg->findInt64("timestamp", &timeUs)); 5471 CHECK(msg->findInt32("fence_fd", &fenceFd)); 5472 5473 return onOMXFillBufferDone( 5474 bufferID, 5475 (size_t)rangeOffset, (size_t)rangeLength, 5476 (OMX_U32)flags, 5477 timeUs, 5478 fenceFd); 5479 } 5480 5481 case omx_message::FRAME_RENDERED: 5482 { 5483 int64_t mediaTimeUs, systemNano; 5484 5485 CHECK(msg->findInt64("media_time_us", &mediaTimeUs)); 5486 CHECK(msg->findInt64("system_nano", &systemNano)); 5487 5488 return onOMXFrameRendered( 5489 mediaTimeUs, systemNano); 5490 } 5491 5492 default: 5493 ALOGE("Unexpected message type: %d", type); 5494 return false; 5495 } 5496} 5497 5498bool ACodec::BaseState::onOMXFrameRendered( 5499 int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) { 5500 // ignore outside of Executing and PortSettingsChanged states 5501 return true; 5502} 5503 5504bool ACodec::BaseState::onOMXEvent( 5505 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 5506 if (event == OMX_EventDataSpaceChanged) { 5507 ColorAspects aspects = ColorUtils::unpackToColorAspects(data2); 5508 5509 mCodec->onDataSpaceChanged((android_dataspace)data1, aspects); 5510 
return true; 5511 } 5512 5513 if (event != OMX_EventError) { 5514 ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)", 5515 mCodec->mComponentName.c_str(), event, data1, data2); 5516 5517 return false; 5518 } 5519 5520 ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1); 5521 5522 // verify OMX component sends back an error we expect. 5523 OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1; 5524 if (!isOMXError(omxError)) { 5525 ALOGW("Invalid OMX error %#x", omxError); 5526 omxError = OMX_ErrorUndefined; 5527 } 5528 mCodec->signalError(omxError); 5529 5530 return true; 5531} 5532 5533bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) { 5534 ALOGV("[%s] onOMXEmptyBufferDone %u", 5535 mCodec->mComponentName.c_str(), bufferID); 5536 5537 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5538 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5539 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5540 ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5541 mCodec->dumpBuffers(kPortIndexInput); 5542 if (fenceFd >= 0) { 5543 ::close(fenceFd); 5544 } 5545 return false; 5546 } 5547 info->mStatus = BufferInfo::OWNED_BY_US; 5548 5549 // input buffers cannot take fences, so wait for any fence now 5550 (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone"); 5551 fenceFd = -1; 5552 5553 // still save fence for completeness 5554 info->setWriteFence(fenceFd, "onOMXEmptyBufferDone"); 5555 5556 // We're in "store-metadata-in-buffers" mode, the underlying 5557 // OMX component had access to data that's implicitly refcounted 5558 // by this "MediaBuffer" object. Now that the OMX component has 5559 // told us that it's done with the input buffer, we can decrement 5560 // the mediaBuffer's reference count. 
5561 info->mData->setMediaBufferBase(NULL); 5562 5563 PortMode mode = getPortMode(kPortIndexInput); 5564 5565 switch (mode) { 5566 case KEEP_BUFFERS: 5567 break; 5568 5569 case RESUBMIT_BUFFERS: 5570 postFillThisBuffer(info); 5571 break; 5572 5573 case FREE_BUFFERS: 5574 default: 5575 ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers"); 5576 return false; 5577 } 5578 5579 return true; 5580} 5581 5582void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 5583 if (mCodec->mPortEOS[kPortIndexInput]) { 5584 return; 5585 } 5586 5587 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 5588 5589 sp<AMessage> notify = mCodec->mNotify->dup(); 5590 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 5591 notify->setInt32("buffer-id", info->mBufferID); 5592 5593 info->mData->meta()->clear(); 5594 notify->setObject("buffer", info->mData); 5595 5596 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); 5597 reply->setInt32("buffer-id", info->mBufferID); 5598 5599 notify->setMessage("reply", reply); 5600 5601 notify->post(); 5602 5603 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 5604} 5605 5606void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 5607 IOMX::buffer_id bufferID; 5608 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5609 sp<MediaCodecBuffer> buffer; 5610 int32_t err = OK; 5611 bool eos = false; 5612 PortMode mode = getPortMode(kPortIndexInput); 5613 5614 sp<RefBase> obj; 5615 if (!msg->findObject("buffer", &obj)) { 5616 /* these are unfilled buffers returned by client */ 5617 CHECK(msg->findInt32("err", &err)); 5618 5619 if (err == OK) { 5620 /* buffers with no errors are returned on MediaCodec.flush */ 5621 mode = KEEP_BUFFERS; 5622 } else { 5623 ALOGV("[%s] saw error %d instead of an input buffer", 5624 mCodec->mComponentName.c_str(), err); 5625 eos = true; 5626 } 5627 5628 buffer.clear(); 5629 } else { 5630 buffer = static_cast<MediaCodecBuffer *>(obj.get()); 5631 } 5632 5633 int32_t tmp; 
5634 if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) { 5635 eos = true; 5636 err = ERROR_END_OF_STREAM; 5637 } 5638 5639 BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID); 5640 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5641 if (status != BufferInfo::OWNED_BY_UPSTREAM) { 5642 ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID); 5643 mCodec->dumpBuffers(kPortIndexInput); 5644 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5645 return; 5646 } 5647 5648 info->mStatus = BufferInfo::OWNED_BY_US; 5649 5650 switch (mode) { 5651 case KEEP_BUFFERS: 5652 { 5653 if (eos) { 5654 if (!mCodec->mPortEOS[kPortIndexInput]) { 5655 mCodec->mPortEOS[kPortIndexInput] = true; 5656 mCodec->mInputEOSResult = err; 5657 } 5658 } 5659 break; 5660 } 5661 5662 case RESUBMIT_BUFFERS: 5663 { 5664 if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) { 5665 // Do not send empty input buffer w/o EOS to the component. 5666 if (buffer->size() == 0 && !eos) { 5667 postFillThisBuffer(info); 5668 break; 5669 } 5670 5671 int64_t timeUs; 5672 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 5673 5674 OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME; 5675 5676 MetadataBufferType metaType = mCodec->mInputMetadataType; 5677 int32_t isCSD = 0; 5678 if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) { 5679 if (mCodec->mIsLegacyVP9Decoder) { 5680 ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data", 5681 mCodec->mComponentName.c_str(), bufferID); 5682 postFillThisBuffer(info); 5683 break; 5684 } 5685 flags |= OMX_BUFFERFLAG_CODECCONFIG; 5686 metaType = kMetadataBufferTypeInvalid; 5687 } 5688 5689 if (eos) { 5690 flags |= OMX_BUFFERFLAG_EOS; 5691 } 5692 5693 if (buffer != info->mCodecData) { 5694 ALOGV("[%s] Needs to copy input data for buffer %u. 
(%p != %p)", 5695 mCodec->mComponentName.c_str(), 5696 bufferID, 5697 buffer.get(), info->mCodecData.get()); 5698 5699 sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput]; 5700 if (converter == NULL || isCSD) { 5701 converter = getCopyConverter(); 5702 } 5703 status_t err = converter->convert(buffer, info->mCodecData); 5704 if (err != OK) { 5705 mCodec->signalError(OMX_ErrorUndefined, err); 5706 return; 5707 } 5708 } 5709 5710 if (flags & OMX_BUFFERFLAG_CODECCONFIG) { 5711 ALOGV("[%s] calling emptyBuffer %u w/ codec specific data", 5712 mCodec->mComponentName.c_str(), bufferID); 5713 } else if (flags & OMX_BUFFERFLAG_EOS) { 5714 ALOGV("[%s] calling emptyBuffer %u w/ EOS", 5715 mCodec->mComponentName.c_str(), bufferID); 5716 } else { 5717#if TRACK_BUFFER_TIMING 5718 ALOGI("[%s] calling emptyBuffer %u w/ time %lld us", 5719 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5720#else 5721 ALOGV("[%s] calling emptyBuffer %u w/ time %lld us", 5722 mCodec->mComponentName.c_str(), bufferID, (long long)timeUs); 5723#endif 5724 } 5725 5726#if TRACK_BUFFER_TIMING 5727 ACodec::BufferStats stats; 5728 stats.mEmptyBufferTimeUs = ALooper::GetNowUs(); 5729 stats.mFillBufferDoneTimeUs = -1ll; 5730 mCodec->mBufferStats.add(timeUs, stats); 5731#endif 5732 5733 if (mCodec->storingMetadataInDecodedBuffers()) { 5734 // try to submit an output buffer for each input buffer 5735 PortMode outputMode = getPortMode(kPortIndexOutput); 5736 5737 ALOGV("MetadataBuffersToSubmit=%u portMode=%s", 5738 mCodec->mMetadataBuffersToSubmit, 5739 (outputMode == FREE_BUFFERS ? "FREE" : 5740 outputMode == KEEP_BUFFERS ? 
"KEEP" : "RESUBMIT")); 5741 if (outputMode == RESUBMIT_BUFFERS) { 5742 mCodec->submitOutputMetadataBuffer(); 5743 } 5744 } 5745 info->checkReadFence("onInputBufferFilled"); 5746 5747 status_t err2 = OK; 5748 switch (metaType) { 5749 case kMetadataBufferTypeInvalid: 5750 break; 5751#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 5752 case kMetadataBufferTypeNativeHandleSource: 5753 if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) { 5754 VideoNativeHandleMetadata *vnhmd = 5755 (VideoNativeHandleMetadata*)info->mCodecData->base(); 5756 err2 = mCodec->mOMXNode->updateNativeHandleInMeta( 5757 mCodec->kPortIndexInput, 5758 NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */), 5759 bufferID); 5760 } 5761 break; 5762 case kMetadataBufferTypeANWBuffer: 5763 if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) { 5764 VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base(); 5765 err2 = mCodec->mOMXNode->updateGraphicBufferInMeta( 5766 mCodec->kPortIndexInput, 5767 new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */), 5768 bufferID); 5769 } 5770 break; 5771#endif 5772 default: 5773 ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode", 5774 asString(metaType), info->mCodecData->size(), 5775 sizeof(buffer_handle_t) * 8); 5776 err2 = ERROR_UNSUPPORTED; 5777 break; 5778 } 5779 5780 if (err2 == OK) { 5781 err2 = mCodec->mOMXNode->emptyBuffer( 5782 bufferID, 5783 0, 5784 info->mCodecData->size(), 5785 flags, 5786 timeUs, 5787 info->mFenceFd); 5788 } 5789 info->mFenceFd = -1; 5790 if (err2 != OK) { 5791 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5792 return; 5793 } 5794 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5795 5796 if (!eos && err == OK) { 5797 getMoreInputDataIfPossible(); 5798 } else { 5799 ALOGV("[%s] Signalled EOS (%d) on the input port", 5800 mCodec->mComponentName.c_str(), err); 5801 5802 mCodec->mPortEOS[kPortIndexInput] = true; 5803 
mCodec->mInputEOSResult = err; 5804 } 5805 } else if (!mCodec->mPortEOS[kPortIndexInput]) { 5806 if (err != OK && err != ERROR_END_OF_STREAM) { 5807 ALOGV("[%s] Signalling EOS on the input port due to error %d", 5808 mCodec->mComponentName.c_str(), err); 5809 } else { 5810 ALOGV("[%s] Signalling EOS on the input port", 5811 mCodec->mComponentName.c_str()); 5812 } 5813 5814 ALOGV("[%s] calling emptyBuffer %u signalling EOS", 5815 mCodec->mComponentName.c_str(), bufferID); 5816 5817 info->checkReadFence("onInputBufferFilled"); 5818 status_t err2 = mCodec->mOMXNode->emptyBuffer( 5819 bufferID, 5820 0, 5821 0, 5822 OMX_BUFFERFLAG_EOS, 5823 0, 5824 info->mFenceFd); 5825 info->mFenceFd = -1; 5826 if (err2 != OK) { 5827 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2)); 5828 return; 5829 } 5830 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5831 5832 mCodec->mPortEOS[kPortIndexInput] = true; 5833 mCodec->mInputEOSResult = err; 5834 } 5835 break; 5836 } 5837 5838 case FREE_BUFFERS: 5839 break; 5840 5841 default: 5842 ALOGE("invalid port mode: %d", mode); 5843 break; 5844 } 5845} 5846 5847void ACodec::BaseState::getMoreInputDataIfPossible() { 5848 if (mCodec->mPortEOS[kPortIndexInput]) { 5849 return; 5850 } 5851 5852 BufferInfo *eligible = NULL; 5853 5854 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 5855 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 5856 5857#if 0 5858 if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) { 5859 // There's already a "read" pending. 
5860 return; 5861 } 5862#endif 5863 5864 if (info->mStatus == BufferInfo::OWNED_BY_US) { 5865 eligible = info; 5866 } 5867 } 5868 5869 if (eligible == NULL) { 5870 return; 5871 } 5872 5873 postFillThisBuffer(eligible); 5874} 5875 5876bool ACodec::BaseState::onOMXFillBufferDone( 5877 IOMX::buffer_id bufferID, 5878 size_t rangeOffset, size_t rangeLength, 5879 OMX_U32 flags, 5880 int64_t timeUs, 5881 int fenceFd) { 5882 ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x", 5883 mCodec->mComponentName.c_str(), bufferID, timeUs, flags); 5884 5885 ssize_t index; 5886 status_t err= OK; 5887 5888#if TRACK_BUFFER_TIMING 5889 index = mCodec->mBufferStats.indexOfKey(timeUs); 5890 if (index >= 0) { 5891 ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index); 5892 stats->mFillBufferDoneTimeUs = ALooper::GetNowUs(); 5893 5894 ALOGI("frame PTS %lld: %lld", 5895 timeUs, 5896 stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs); 5897 5898 mCodec->mBufferStats.removeItemsAt(index); 5899 stats = NULL; 5900 } 5901#endif 5902 5903 BufferInfo *info = 5904 mCodec->findBufferByID(kPortIndexOutput, bufferID, &index); 5905 BufferInfo::Status status = BufferInfo::getSafeStatus(info); 5906 if (status != BufferInfo::OWNED_BY_COMPONENT) { 5907 ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID); 5908 mCodec->dumpBuffers(kPortIndexOutput); 5909 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 5910 if (fenceFd >= 0) { 5911 ::close(fenceFd); 5912 } 5913 return true; 5914 } 5915 5916 info->mDequeuedAt = ++mCodec->mDequeueCounter; 5917 info->mStatus = BufferInfo::OWNED_BY_US; 5918 5919 if (info->mRenderInfo != NULL) { 5920 // The fence for an emptied buffer must have signaled, but there still could be queued 5921 // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these, 5922 // as we will soon requeue this buffer to the surface. 
While in theory we could still keep 5923 // track of buffers that are requeued to the surface, it is better to add support to the 5924 // buffer-queue to notify us of released buffers and their fences (in the future). 5925 mCodec->notifyOfRenderedFrames(true /* dropIncomplete */); 5926 } 5927 5928 // byte buffers cannot take fences, so wait for any fence now 5929 if (mCodec->mNativeWindow == NULL) { 5930 (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone"); 5931 fenceFd = -1; 5932 } 5933 info->setReadFence(fenceFd, "onOMXFillBufferDone"); 5934 5935 PortMode mode = getPortMode(kPortIndexOutput); 5936 5937 switch (mode) { 5938 case KEEP_BUFFERS: 5939 break; 5940 5941 case RESUBMIT_BUFFERS: 5942 { 5943 if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS) 5944 || mCodec->mPortEOS[kPortIndexOutput])) { 5945 ALOGV("[%s] calling fillBuffer %u", 5946 mCodec->mComponentName.c_str(), info->mBufferID); 5947 5948 err = mCodec->mOMXNode->fillBuffer(info->mBufferID, info->mFenceFd); 5949 info->mFenceFd = -1; 5950 if (err != OK) { 5951 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 5952 return true; 5953 } 5954 5955 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 5956 break; 5957 } 5958 5959 sp<AMessage> reply = 5960 new AMessage(kWhatOutputBufferDrained, mCodec); 5961 sp<MediaCodecBuffer> buffer = info->mData; 5962 5963 if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) { 5964 // pretend that output format has changed on the first frame (we used to do this) 5965 if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) { 5966 mCodec->onOutputFormatChanged(mCodec->mOutputFormat); 5967 } 5968 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 5969 mCodec->sendFormatChange(); 5970 } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) { 5971 // If potentially rendering onto a surface, always save key format data (crop & 5972 // data space) so that we can set it if and once the buffer is rendered. 
5973 mCodec->addKeyFormatChangesToRenderBufferNotification(reply); 5974 } 5975 5976 if (mCodec->usingMetadataOnEncoderOutput()) { 5977 native_handle_t *handle = NULL; 5978 VideoNativeHandleMetadata &nativeMeta = 5979 *(VideoNativeHandleMetadata *)buffer->data(); 5980 if (buffer->size() >= sizeof(nativeMeta) 5981 && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) { 5982#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS 5983 // handle is only valid on 32-bit/mediaserver process 5984 handle = NULL; 5985#else 5986 handle = (native_handle_t *)nativeMeta.pHandle; 5987#endif 5988 } 5989 buffer->meta()->setPointer("handle", handle); 5990 buffer->meta()->setInt32("rangeOffset", rangeOffset); 5991 buffer->meta()->setInt32("rangeLength", rangeLength); 5992 } else if (buffer == info->mCodecData) { 5993 buffer->setRange(rangeOffset, rangeLength); 5994 } else { 5995 info->mCodecData->setRange(rangeOffset, rangeLength); 5996 // in this case we know that mConverter is not null 5997 status_t err = mCodec->mConverter[kPortIndexOutput]->convert( 5998 info->mCodecData, buffer); 5999 if (err != OK) { 6000 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6001 return true; 6002 } 6003 } 6004#if 0 6005 if (mCodec->mNativeWindow == NULL) { 6006 if (IsIDR(info->mData)) { 6007 ALOGI("IDR frame"); 6008 } 6009 } 6010#endif 6011 6012 if (mCodec->mSkipCutBuffer != NULL) { 6013 mCodec->mSkipCutBuffer->submit(buffer); 6014 } 6015 buffer->meta()->setInt64("timeUs", timeUs); 6016 6017 sp<AMessage> notify = mCodec->mNotify->dup(); 6018 notify->setInt32("what", CodecBase::kWhatDrainThisBuffer); 6019 notify->setInt32("buffer-id", info->mBufferID); 6020 notify->setObject("buffer", buffer); 6021 notify->setInt32("flags", flags); 6022 6023 reply->setInt32("buffer-id", info->mBufferID); 6024 6025 notify->setMessage("reply", reply); 6026 6027 notify->post(); 6028 6029 info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM; 6030 6031 if (flags & OMX_BUFFERFLAG_EOS) { 6032 
                // Output-side end of stream: forward EOS (carrying any error
                // that was recorded when input EOS was queued) to the client,
                // and latch mPortEOS so we stop resubmitting output buffers.
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        case FREE_BUFFERS:
            // Port is being torn down/reconfigured: release the buffer back
            // to the allocator instead of recycling it to the component.
            err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return true;
            }
            break;

        default:
            ALOGE("Invalid port mode: %d", mode);
            return false;
    }

    return true;
}

// Handles kWhatOutputBufferDrained: the downstream client has returned an
// output buffer. Applies any pending crop/dataspace changes to the native
// window, then either queues the buffer for rendering or drops it, and
// finally recycles a buffer to the component according to the port mode.
// Fence ownership note: info->mFenceFd is handed off to whoever consumes the
// buffer (queueBuffer/fillBuffer) and reset to -1 immediately afterwards.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<RefBase> obj;
    sp<MediaCodecBuffer> buffer = nullptr;
    if (msg->findObject("buffer", &obj)) {
        buffer = static_cast<MediaCodecBuffer *>(obj.get());
    }
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        // Client returned a buffer it does not own — fatal protocol error.
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    // Crop/dataspace were attached to the reply in onOMXFillBufferDone; push
    // them to the native window only when they differ from the last values set.
    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
            && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
        mCodec->mLastNativeWindowCrop = crop;
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    int32_t dataSpace;
    if (msg->findInt32("dataspace", &dataSpace)
            && dataSpace != mCodec->mLastNativeWindowDataSpace) {
        status_t err = native_window_set_buffers_data_space(
                mCodec->mNativeWindow.get(), (android_dataspace)dataSpace);
        // NOTE: the cached value is updated even if the call failed (only a
        // warning is logged), so a failed set is not retried for this value.
        mCodec->mLastNativeWindowDataSpace = dataSpace;
        ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err);
    }

    int32_t render;
    if (mCodec->mNativeWindow != NULL
            && msg->findInt32("render", &render) && render != 0
            && buffer != NULL && buffer->size() != 0) {
        ATRACE_NAME("render");
        // The client wants this buffer to be rendered.

        // save buffers sent to the surface so we can get render time when they return
        int64_t mediaTimeUs = -1;
        buffer->meta()->findInt64("timeUs", &mediaTimeUs);
        if (mediaTimeUs >= 0) {
            // dup() the fence fd: the tracker keeps its own Fence while the
            // original fd is still passed on to queueBuffer below.
            mCodec->mRenderTracker.onFrameQueued(
                    mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd)));
        }

        int64_t timestampNs = 0;
        if (!msg->findInt64("timestampNs", &timestampNs)) {
            // use media timestamp if client did not request a specific render timestamp
            if (buffer->meta()->findInt64("timeUs", &timestampNs)) {
                ALOGV("using buffer PTS of %lld", (long long)timestampNs);
                timestampNs *= 1000;  // us -> ns
            }
        }

        status_t err;
        err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs);
        ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err);

        info->checkReadFence("onOutputBufferDrained before queueBuffer");
        // queueBuffer consumes the fence fd regardless of outcome.
        err = mCodec->mNativeWindow->queueBuffer(
                mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
        info->mFenceFd = -1;
        if (err == OK) {
            info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        } else {
            ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err);
            mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            info->mStatus = BufferInfo::OWNED_BY_US;
            // keeping read fence as write fence to avoid clobbering
            info->mIsReadFence = false;
        }
    } else {
        // Buffer is dropped (not rendered).
        if (mCodec->mNativeWindow != NULL &&
            (buffer == NULL || buffer->size() != 0)) {
            // move read fence into write fence to avoid clobbering
            info->mIsReadFence = false;
            ATRACE_NAME("frame-drop");
        }
        info->mStatus = BufferInfo::OWNED_BY_US;
    }

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // XXX fishy, revisit!!! What about the FREE_BUFFERS case below?

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                // We cannot resubmit the buffer we just rendered, dequeue
                // the spare instead.

                info = mCodec->dequeueBufferFromNativeWindow();
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (!mCodec->mPortEOS[kPortIndexOutput]) {
                if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    // We cannot resubmit the buffer we just rendered, dequeue
                    // the spare instead.

                    info = mCodec->dequeueBufferFromNativeWindow();
                }

                // info may be NULL if the dequeue above failed.
                if (info != NULL) {
                    ALOGV("[%s] calling fillBuffer %u",
                         mCodec->mComponentName.c_str(), info->mBufferID);
                    info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS");
                    status_t err = mCodec->mOMXNode->fillBuffer(
                            info->mBufferID, info->mFenceFd);
                    info->mFenceFd = -1;
                    if (err == OK) {
                        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                    } else {
                        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    }
                }
            }
            break;
        }

        case FREE_BUFFERS:
        {
            status_t err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            }
            break;
        }

        default:
            ALOGE("Invalid port mode: %d", mode);
            return;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}
// Entered when the codec has no OMX node: drop the binder death notifier and
// clear all per-component state so a subsequent allocation starts clean.
void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    if (mDeathNotifier != NULL) {
        if (mCodec->mOMXNode != NULL) {
            // Stop listening for the remote OMX node's death; the node is
            // about to be released below.
            sp<IBinder> binder = IInterface::asBinder(mCodec->mOMXNode);
            binder->unlinkToDeath(mDeathNotifier);
        }
        mDeathNotifier.clear();
    }

    mCodec->mUsingNativeWindow = false;
    mCodec->mNativeWindow.clear();
    mCodec->mNativeWindowUsageBits = 0;
    mCodec->mOMX.clear();
    mCodec->mOMXNode.clear();
    mCodec->mFlags = 0;
    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mConverter[0].clear();
    mCodec->mConverter[1].clear();
    mCodec->mComponentName.clear();
}

bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }

        case ACodec::kWhatAllocateComponent:
        {
            onAllocateComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));
            ALOGW_IF(keepComponentAllocated,
                     "cannot keep component allocated on shutdown in Uninitialized state");

            // No component exists, so shutdown completes immediately.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // Nothing to flush before a component exists; report completion.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatReleaseCodecInstance:
        {
            // nothing to do, as we have already signaled shutdown
            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// One-shot setup path: allocate + configure + start in a single message.
void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    if (onAllocateComponent(msg)
            && mCodec->mLoadedState->onConfigureComponent(msg)) {
        mCodec->mLoadedState->onStart();
    }
}

// Allocates an OMX node for the requested component (by explicit name, or by
// mime/encoder lookup through MediaCodecList), installs a death notifier and
// the observer's notification message, then transitions to Loaded.
// Returns false (after signalling the error) if no component could be
// instantiated.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mOMXNode == NULL);

    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    sp<IOMX> omx = client.interface();

    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

    Vector<AString> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        // Caller named a specific component; use it if the codec list knows it.
        sp<IMediaCodecList> list = MediaCodecList::getInstance();
        if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
            matchingCodecs.add(componentName);
        }
    } else {
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        MediaCodecList::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                0,       // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    sp<IOMXNode> omxNode;

    // Try candidates in order until one allocates successfully.
    status_t err = NAME_NOT_FOUND;
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs[matchIndex];
        quirks = MediaCodecList::getQuirksFor(componentName.c_str());

        // Temporarily boost thread priority so component allocation is not
        // starved; restore the previous priority right after.
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        err = omx->allocateNode(componentName.c_str(), observer, &omxNode);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        omxNode = NULL;
    }

    if (omxNode == NULL) {
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                    encoder ? "en" : "de", mime.c_str(), err);
        } else {
            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
        }

        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
        return false;
    }

    mDeathNotifier = new DeathNotifier(notify);
    if (IInterface::asBinder(omxNode)->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    // Bump the node generation so stale messages from a previous node are
    // ignored by the message dispatcher.
    notify = new AMessage(kWhatOMXMessageList, mCodec);
    notify->setInt32("generation", ++mCodec->mNodeGeneration);
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mRenderTracker.setComponentName(componentName);
    mCodec->mFlags = 0;

    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    omxNode->setQuirks(quirks);
    mCodec->mOMX = omx;
    mCodec->mOMXNode = omxNode;

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedState::LoadedState(ACodec *codec)
    :
      BaseState(codec) {
}

// Entered when the component exists but owns no buffers: reset per-session
// state, finish any pending shutdown, then replay deferred messages.
void ACodec::LoadedState::stateEntered() {
    ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetadataBuffersToSubmit = 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();
    mCodec->mGraphicBufferSource.clear();

    if (mCodec->mShutdownInProgress) {
        // A shutdown was requested while tearing down to Loaded; finish it now.
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;

    mCodec->processDeferredMessages();
}

// Frees the OMX node (unless the component should stay allocated for reuse)
// and acknowledges an explicit client-requested shutdown exactly once.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        (void)mCodec->mOMXNode->freeNode();

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatSetInputSurface:
        {
            onSetInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // Nothing is running yet; flushing is trivially complete.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Configures the freshly allocated component from the client's format message.
// On success notifies kWhatComponentConfigured with the negotiated input and
// output formats; on failure signals an error and returns false.
bool ACodec::LoadedState::onConfigureComponent(
        const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mOMXNode != NULL);

    status_t err = OK;
    AString mime;
    if (!msg->findString("mime", &mime)) {
        err = BAD_VALUE;
    } else {
        err = mCodec->configureCodec(mime.c_str(), msg);
    }
    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d",
              mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentConfigured);
        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
        notify->post();
    }

    return true;
}

// Applies all configured encoder-surface options (dataspace, frame repeat,
// max PTS gap, max fps, time lapse, initial suspend, color aspects) to the
// graphic buffer source. Returns the first failing status, or OK.
status_t ACodec::LoadedState::setupInputSurface() {
    if (mCodec->mGraphicBufferSource == NULL) {
        return BAD_VALUE;
    }

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    if (err != OK) {
        ALOGE("Failed to get default data space");
        return err;
    }

    err = statusFromBinderStatus(
            mCodec->mGraphicBufferSource->configure(mCodec->mOMXNode, dataSpace));
    if (err != OK) {
        ALOGE("[%s] Unable to configure for node (err %d)",
              mCodec->mComponentName.c_str(), err);
        return err;
    }

    if (mCodec->mRepeatFrameDelayUs > 0ll) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setRepeatPreviousFrameDelayUs(
                        mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mMaxPtsGapUs > 0ll) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setMaxTimestampGapUs(
                        mCodec->mMaxPtsGapUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setMaxFps(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setTimeLapseConfig(
                        mCodec->mTimePerFrameUs, mCodec->mTimePerCaptureUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setSuspend(true));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    // Best effort: expose the consumer usage bits to the client if the
    // component reports them (failure here is silently ignored).
    uint32_t usageBits;
    if (mCodec->mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    // Forward negotiated color aspects (stored as a packed blob on the input
    // format) to the buffer source, validating the blob size first.
    sp<ABuffer> colorAspectsBuffer;
    if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        if (colorAspectsBuffer->size() != sizeof(ColorAspects)) {
            return INVALID_OPERATION;
        }

        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setColorAspects(ColorUtils::packToU32(
                        *(ColorAspects *)colorAspectsBuffer->base())));

        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}

// Creates a new encoder input surface via IOMX and configures it. Always
// posts kWhatInputSurfaceCreated — carrying either the producer (and the
// input/output formats) on success, or an "err" field on failure.
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    sp<IGraphicBufferProducer> bufferProducer;
    status_t err = mCodec->mOMX->createInputSurface(
            &bufferProducer, &mCodec->mGraphicBufferSource);

    if (err == OK) {
        err = setupInputSurface();
    }

    if (err == OK) {
        // Surface input always uses ANW-buffer metadata on the input port.
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;

        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);

        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state.  We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Attaches a client-supplied persistent input surface and configures it.
// Mirrors onCreateInputSurface, replying with kWhatInputSurfaceAccepted.
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());
    mCodec->mGraphicBufferSource = surface->getBufferSource();

    status_t err = setupInputSurface();

    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;

        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state.  We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Kicks off the Loaded -> Idle OMX state transition; buffers are allocated
// once LoadedToIdleState is entered.
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMXNode->sendCommand(OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// Allocates input/output buffers for the Idle transition; on failure, rolls
// the component back to Loaded and frees whatever was allocated.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
              "(error 0x%08x)",
              err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->mOMXNode->sendCommand(
                OMX_CommandStateSet, OMX_StateLoaded);
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates buffers on both ports, input first; returns the first failure.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Defer until the transition completes.
            mCodec->deferMessage(msg);
            return true;
        }

        case
kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Waits for the Loaded->Idle command to complete, then requests the
// Idle->Executing transition. Any unexpected completion is fatal.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMXNode->sendCommand(
                        OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Defer until the transition completes.
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Waits for the Idle->Executing command to complete, then resumes buffer
// exchange and enters ExecutingState.
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

// While executing, both ports recycle buffers back to the component.
ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    return RESUBMIT_BUFFERS;
}

void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Hands every output buffer we (or no one) currently own back to the
// component via fillBuffer. Buffers held by the native window are left there.
// Any ownership inconsistency or fillBuffer failure is fatal.
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        // fillBuffer consumes the fence fd; clear it unconditionally.
        status_t err = mCodec->mOMXNode->fillBuffer(info->mBufferID, info->mFenceFd);
        info->mFenceFd = -1;
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

// (Re)starts buffer exchange after the Executing transition or a resume
// request. Idempotent: does nothing if already active.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }

    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    // Restart render-time tracking from "now" and replay deferred messages.
    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            // Record shutdown intent; it completes once we reach Loaded.
            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMXNode->sendCommand(
                    OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            status_t err = mCodec->mOMXNode->sendCommand(OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply with the result if the caller asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    // Keep re-posting until output EOS arrives.
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime parameters while executing. Each recognized key is applied
// independently; the first failure aborts and is returned to the caller.
// Surface-only keys (time offset, start time, suspend) require an input
// surface (mGraphicBufferSource) and fail with INVALID_OPERATION otherwise.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMXNode->setConfig(
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t timeOffsetUs;
    if (params->findInt64("time-offset-us", &timeOffsetUs)) {
        if (mGraphicBufferSource == NULL) {
            ALOGE("[%s] Invalid to set input buffer time offset without surface",
                    mComponentName.c_str());
            return INVALID_OPERATION;
        }

        status_t err = statusFromBinderStatus(
                mGraphicBufferSource->setTimeOffsetUs(timeOffsetUs));

        if (err != OK) {
            ALOGE("[%s] Unable to set input buffer time offset (err %d)",
                mComponentName.c_str(),
                err);
            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        if (mGraphicBufferSource == NULL) {
            ALOGE("[%s] Invalid to set start time without surface",
                    mComponentName.c_str());
            return INVALID_OPERATION;
        }

        status_t err = statusFromBinderStatus(
                mGraphicBufferSource->setStartTimeUs(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        if (mGraphicBufferSource == NULL) {
ALOGE("[%s] Invalid to set suspend without surface", 7207 mComponentName.c_str()); 7208 return INVALID_OPERATION; 7209 } 7210 7211 status_t err = statusFromBinderStatus( 7212 mGraphicBufferSource->setSuspend(dropInputFrames != 0)); 7213 7214 if (err != OK) { 7215 ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err); 7216 return err; 7217 } 7218 } 7219 7220 int32_t dummy; 7221 if (params->findInt32("request-sync", &dummy)) { 7222 status_t err = requestIDRFrame(); 7223 7224 if (err != OK) { 7225 ALOGE("Requesting a sync frame failed w/ err %d", err); 7226 return err; 7227 } 7228 } 7229 7230 float rate; 7231 if (params->findFloat("operating-rate", &rate) && rate > 0) { 7232 status_t err = setOperatingRate(rate, mIsVideo); 7233 if (err != OK) { 7234 ALOGE("Failed to set parameter 'operating-rate' (err %d)", err); 7235 return err; 7236 } 7237 } 7238 7239 int32_t intraRefreshPeriod = 0; 7240 if (params->findInt32("intra-refresh-period", &intraRefreshPeriod) 7241 && intraRefreshPeriod > 0) { 7242 status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false); 7243 if (err != OK) { 7244 ALOGI("[%s] failed setIntraRefreshPeriod. 
Failure is fine since this key is optional", 7245 mComponentName.c_str()); 7246 err = OK; 7247 } 7248 } 7249 7250 status_t err = configureTemporalLayers(params, false /* inConfigure */, mOutputFormat); 7251 if (err != OK) { 7252 err = OK; // ignore failure 7253 } 7254 7255 return err; 7256} 7257 7258void ACodec::onSignalEndOfInputStream() { 7259 sp<AMessage> notify = mNotify->dup(); 7260 notify->setInt32("what", CodecBase::kWhatSignaledInputEOS); 7261 7262 status_t err = INVALID_OPERATION; 7263 if (mGraphicBufferSource != NULL) { 7264 err = statusFromBinderStatus(mGraphicBufferSource->signalEndOfInputStream()); 7265 } 7266 if (err != OK) { 7267 notify->setInt32("err", err); 7268 } 7269 notify->post(); 7270} 7271 7272bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 7273 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7274 return true; 7275} 7276 7277bool ACodec::ExecutingState::onOMXEvent( 7278 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7279 switch (event) { 7280 case OMX_EventPortSettingsChanged: 7281 { 7282 CHECK_EQ(data1, (OMX_U32)kPortIndexOutput); 7283 7284 mCodec->onOutputFormatChanged(); 7285 7286 if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) { 7287 mCodec->mMetadataBuffersToSubmit = 0; 7288 CHECK_EQ(mCodec->mOMXNode->sendCommand( 7289 OMX_CommandPortDisable, kPortIndexOutput), 7290 (status_t)OK); 7291 7292 mCodec->freeOutputBuffersNotOwnedByComponent(); 7293 7294 mCodec->changeState(mCodec->mOutputPortSettingsChangedState); 7295 } else if (data2 != OMX_IndexConfigCommonOutputCrop 7296 && data2 != OMX_IndexConfigAndroidIntraRefresh) { 7297 ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x", 7298 mCodec->mComponentName.c_str(), data2); 7299 } 7300 7301 return true; 7302 } 7303 7304 case OMX_EventBufferFlag: 7305 { 7306 return true; 7307 } 7308 7309 default: 7310 return BaseState::onOMXEvent(event, data1, data2); 7311 } 7312} 7313 
////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// While reconfiguring the output port, output buffers are freed as they come
// back; input buffers keep flowing so the component can keep decoding.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        // These requests cannot be serviced mid-reconfiguration; defer them
        // until we are back in ExecutingState (or shut down).
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
            mCodec->mComponentName.c_str());
}

bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Drives the two-phase output port reconfiguration:
//   1. PortDisable completes -> free the old buffer dealer, re-enable the
//      port and allocate fresh buffers;
//   2. PortEnable completes -> resubmit output buffers (if still active) and
//      return to ExecutingState.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                status_t err = OK;
                // All output buffers must have been returned before the
                // disable completes; anything else is a component bug.
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMXNode->sendCommand(
                            OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from executing->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                // Only resubmit output buffers if the codec hasn't been
                // flushed/stopped in the meantime.
                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

bool
ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            // The only command completion expected here is StateSet->Idle.
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component has reported Idle AND every buffer is back in our hands,
// request the Loaded state, free both ports and move to IdleToLoadedState.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMXNode->sendCommand(
                OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            // Free both ports even if the first free fails; report the
            // first error encountered.
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            if (err == OK) {
                err = err2;
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display. This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    // Every returned buffer may be the last one we were waiting for.
    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...
7574 7575 handled = true; 7576 break; 7577 } 7578 7579 case kWhatFlush: 7580 { 7581 // Don't send me a flush request if you previously wanted me 7582 // to shutdown. 7583 ALOGE("Got flush request in IdleToLoadedState"); 7584 break; 7585 } 7586 7587 default: 7588 handled = BaseState::onMessageReceived(msg); 7589 break; 7590 } 7591 7592 return handled; 7593} 7594 7595void ACodec::IdleToLoadedState::stateEntered() { 7596 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7597} 7598 7599bool ACodec::IdleToLoadedState::onOMXEvent( 7600 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7601 switch (event) { 7602 case OMX_EventCmdComplete: 7603 { 7604 if (data1 != (OMX_U32)OMX_CommandStateSet 7605 || data2 != (OMX_U32)OMX_StateLoaded) { 7606 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7607 asString((OMX_COMMANDTYPE)data1), data1, 7608 asString((OMX_STATETYPE)data2), data2); 7609 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7610 return true; 7611 } 7612 7613 mCodec->changeState(mCodec->mLoadedState); 7614 7615 return true; 7616 } 7617 7618 default: 7619 return BaseState::onOMXEvent(event, data1, data2); 7620 } 7621} 7622 7623//////////////////////////////////////////////////////////////////////////////// 7624 7625ACodec::FlushingState::FlushingState(ACodec *codec) 7626 : BaseState(codec) { 7627} 7628 7629void ACodec::FlushingState::stateEntered() { 7630 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7631 7632 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7633} 7634 7635bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7636 bool handled = false; 7637 7638 switch (msg->what()) { 7639 case kWhatShutdown: 7640 { 7641 mCodec->deferMessage(msg); 7642 break; 7643 } 7644 7645 case kWhatFlush: 7646 { 7647 // We're already doing this right now. 
7648 handled = true; 7649 break; 7650 } 7651 7652 default: 7653 handled = BaseState::onMessageReceived(msg); 7654 break; 7655 } 7656 7657 return handled; 7658} 7659 7660bool ACodec::FlushingState::onOMXEvent( 7661 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7662 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7663 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7664 7665 switch (event) { 7666 case OMX_EventCmdComplete: 7667 { 7668 if (data1 != (OMX_U32)OMX_CommandFlush) { 7669 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7670 asString((OMX_COMMANDTYPE)data1), data1, data2); 7671 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7672 return true; 7673 } 7674 7675 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7676 if (mFlushComplete[data2]) { 7677 ALOGW("Flush already completed for %s port", 7678 data2 == kPortIndexInput ? "input" : "output"); 7679 return true; 7680 } 7681 mFlushComplete[data2] = true; 7682 7683 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7684 changeStateIfWeOwnAllBuffers(); 7685 } 7686 } else if (data2 == OMX_ALL) { 7687 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7688 ALOGW("received flush complete event for OMX_ALL before ports have been" 7689 "flushed (%d/%d)", 7690 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7691 return false; 7692 } 7693 7694 changeStateIfWeOwnAllBuffers(); 7695 } else { 7696 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7697 } 7698 7699 return true; 7700 } 7701 7702 case OMX_EventPortSettingsChanged: 7703 { 7704 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7705 msg->setInt32("type", omx_message::EVENT); 7706 msg->setInt32("generation", mCodec->mNodeGeneration); 7707 msg->setInt32("event", event); 7708 msg->setInt32("data1", data1); 7709 msg->setInt32("data2", data2); 7710 7711 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7712 
                    mCodec->mComponentName.c_str());

            mCodec->deferMessage(msg);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }

    return true;
}

void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    // Each returned buffer may be the last one needed to finish the flush.
    changeStateIfWeOwnAllBuffers();
}

void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

// Completes the flush once both ports reported completion and we own all
// buffers: reclaims native-window buffers, resets EOS bookkeeping, notifies
// the client and returns to ExecutingState.
void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() {
    if (mFlushComplete[kPortIndexInput]
            && mFlushComplete[kPortIndexOutput]
            && mCodec->allYourBuffersAreBelongToUs()) {
        // We now own all buffers except possibly those still queued with
        // the native window for rendering. Let's get those back as well.
        mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs();

        mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));

        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatFlushCompleted);
        notify->post();

        mCodec->mPortEOS[kPortIndexInput] =
            mCodec->mPortEOS[kPortIndexOutput] = false;

        mCodec->mInputEOSResult = OK;

        if (mCodec->mSkipCutBuffer != NULL) {
            mCodec->mSkipCutBuffer->clear();
        }

        mCodec->changeState(mCodec->mExecutingState);
    }
}

// Queries the capabilities (profiles/levels, color formats, feature flags)
// of component |name| for |mime|/|isEncoder| by temporarily allocating an
// OMX node. On success *caps holds the built capabilities; on failure the
// node/client are released and the error is returned.
status_t ACodec::queryCapabilities(
        const AString &name, const AString &mime, bool isEncoder,
        sp<MediaCodecInfo::Capabilities> *caps) {
    (*caps).clear();
    const char *role = GetComponentRole(isEncoder, mime.c_str());
    if (role == NULL) {
        return BAD_VALUE;
    }

    OMXClient client;
    status_t err = client.connect();
    if (err != OK) {
        return err;
    }

    sp<IOMX> omx = client.interface();
    sp<CodecObserver> observer = new CodecObserver;
    sp<IOMXNode> omxNode;

    err = omx->allocateNode(name.c_str(), observer, &omxNode);
    if (err != OK) {
        client.disconnect();
        return err;
    }

    err = SetComponentRole(omxNode, role);
    if (err != OK) {
        omxNode->freeNode();
        client.disconnect();
        return err;
    }

    sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder();
    bool isVideo = mime.startsWithIgnoreCase("video/");

    if (isVideo) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
        InitOMXParams(&param);
        // Encoders advertise profiles on their output port, decoders on input.
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;

        // Enumerate supported profile/level pairs until the component runs
        // out, capped at kMaxIndicesToCheck.
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omxNode->getParameter(
                    OMX_IndexParamVideoProfileLevelQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            builder->addProfileLevel(param.eProfile, param.eLevel);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x/%x",
                        name.c_str(), index,
                        param.eProfile, param.eLevel);
            }
        }

        // Color format query
        // return colors in the order reported by the OMX component
        // prefix "flexible" standard ones with the flexible equivalent
        OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
        InitOMXParams(&portFormat);
        // Color formats apply to the raw-video port: input for encoders,
        // output for decoders (the opposite of the profile query above).
        portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
        Vector<uint32_t> supportedColors; // shadow copy to check for duplicates
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            portFormat.nIndex = index;
            status_t err = omxNode->getParameter(
                    OMX_IndexParamVideoPortFormat,
                    &portFormat, sizeof(portFormat));
            if (err != OK) {
                break;
            }

            OMX_U32 flexibleEquivalent;
            if (IsFlexibleColorFormat(
                    omxNode, portFormat.eColorFormat, false /* usingNativeWindow */,
                    &flexibleEquivalent)) {
                // Insert the flexible equivalent once, before the first
                // concrete format that maps to it.
                bool marked = false;
                for (size_t i = 0; i < supportedColors.size(); ++i) {
                    if (supportedColors[i] == flexibleEquivalent) {
                        marked = true;
                        break;
                    }
                }
                if (!marked) {
                    supportedColors.push(flexibleEquivalent);
                    builder->addColorFormat(flexibleEquivalent);
                }
            }
            supportedColors.push(portFormat.eColorFormat);
            builder->addColorFormat(portFormat.eColorFormat);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                        name.c_str(), index,
                        asString(portFormat.eColorFormat), portFormat.eColorFormat);
            }
        }
    } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) {
        // More audio codecs if they have profiles.
        OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
        InitOMXParams(&param);
        param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
        for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
            param.nProfileIndex = index;
            status_t err = omxNode->getParameter(
                    (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
                    &param, sizeof(param));
            if (err != OK) {
                break;
            }
            // For audio, level is ignored.
            builder->addProfileLevel(param.eProfile, 0 /* level */);

            if (index == kMaxIndicesToCheck) {
                ALOGW("[%s] stopping checking profiles after %u: %x",
                        name.c_str(), index,
                        param.eProfile);
            }
        }

        // NOTE: Without Android extensions, OMX does not provide a way to query
        // AAC profile support
        if (param.nProfileIndex == 0) {
            ALOGW("component %s doesn't support profile query.", name.c_str());
        }
    }

    if (isVideo && !isEncoder) {
        // Probe decoder features by attempting the corresponding setup calls.
        native_handle_t *sidebandHandle = NULL;
        if (omxNode->configureVideoTunnelMode(
                kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
            // tunneled playback includes adaptive playback
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback
                    | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback);
        } else if (omxNode->storeMetaDataInBuffers(
                kPortIndexOutput, OMX_TRUE) == OK ||
                omxNode->prepareForAdaptivePlayback(
                kPortIndexOutput, OMX_TRUE,
                1280 /* width */, 720 /* height */) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback);
        }
    }

    if (isVideo && isEncoder) {
        OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexOutput;
        // TODO: should we verify if fallback is supported?
        if (omxNode->getConfig(
                (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
                &params, sizeof(params)) == OK) {
            builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh);
        }
    }

    *caps = builder;
    omxNode->freeNode();
    client.disconnect();
    return OK;
}

// These are supposed to be equivalent to the logic in
// "audio_channel_out_mask_from_count".
7929//static 7930status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7931 switch (numChannels) { 7932 case 1: 7933 map[0] = OMX_AUDIO_ChannelCF; 7934 break; 7935 case 2: 7936 map[0] = OMX_AUDIO_ChannelLF; 7937 map[1] = OMX_AUDIO_ChannelRF; 7938 break; 7939 case 3: 7940 map[0] = OMX_AUDIO_ChannelLF; 7941 map[1] = OMX_AUDIO_ChannelRF; 7942 map[2] = OMX_AUDIO_ChannelCF; 7943 break; 7944 case 4: 7945 map[0] = OMX_AUDIO_ChannelLF; 7946 map[1] = OMX_AUDIO_ChannelRF; 7947 map[2] = OMX_AUDIO_ChannelLR; 7948 map[3] = OMX_AUDIO_ChannelRR; 7949 break; 7950 case 5: 7951 map[0] = OMX_AUDIO_ChannelLF; 7952 map[1] = OMX_AUDIO_ChannelRF; 7953 map[2] = OMX_AUDIO_ChannelCF; 7954 map[3] = OMX_AUDIO_ChannelLR; 7955 map[4] = OMX_AUDIO_ChannelRR; 7956 break; 7957 case 6: 7958 map[0] = OMX_AUDIO_ChannelLF; 7959 map[1] = OMX_AUDIO_ChannelRF; 7960 map[2] = OMX_AUDIO_ChannelCF; 7961 map[3] = OMX_AUDIO_ChannelLFE; 7962 map[4] = OMX_AUDIO_ChannelLR; 7963 map[5] = OMX_AUDIO_ChannelRR; 7964 break; 7965 case 7: 7966 map[0] = OMX_AUDIO_ChannelLF; 7967 map[1] = OMX_AUDIO_ChannelRF; 7968 map[2] = OMX_AUDIO_ChannelCF; 7969 map[3] = OMX_AUDIO_ChannelLFE; 7970 map[4] = OMX_AUDIO_ChannelLR; 7971 map[5] = OMX_AUDIO_ChannelRR; 7972 map[6] = OMX_AUDIO_ChannelCS; 7973 break; 7974 case 8: 7975 map[0] = OMX_AUDIO_ChannelLF; 7976 map[1] = OMX_AUDIO_ChannelRF; 7977 map[2] = OMX_AUDIO_ChannelCF; 7978 map[3] = OMX_AUDIO_ChannelLFE; 7979 map[4] = OMX_AUDIO_ChannelLR; 7980 map[5] = OMX_AUDIO_ChannelRR; 7981 map[6] = OMX_AUDIO_ChannelLS; 7982 map[7] = OMX_AUDIO_ChannelRS; 7983 break; 7984 default: 7985 return -EINVAL; 7986 } 7987 7988 return OK; 7989} 7990 7991} // namespace android 7992