// ACodec.cpp revision fd44d8e2f2d37184f7add67125657f3fbfb5a085
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "ACodec"

#ifdef __LP64__
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif

#include <inttypes.h>
#include <utils/Trace.h>

#include <gui/Surface.h>

#include <media/stagefright/ACodec.h>

#include <binder/MemoryDealer.h>

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>

#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/SurfaceUtils.h>
#include <media/hardware/HardwareAPI.h>

#include <OMX_AudioExt.h>
#include <OMX_VideoExt.h>
#include <OMX_Component.h>
#include <OMX_IndexExt.h>
#include <OMX_AsString.h>

#include "include/avc_utils.h"
#include "include/DataConverter.h"
#include "include/SecureBuffer.h"
#include "include/SharedMemoryBuffer.h"
#include "omx/OMXUtils.h"

namespace android {

using binder::Status;

enum {
    kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
};

// OMX errors are directly mapped into status_t range if
// there is no corresponding MediaError status code.
// Use the statusFromOMXError(int32_t omxError) function.
//
// Currently this is a direct map.
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors     from 0x90000000 - 0x9000FFFF
// Extension OMX errors  from 0x8F000000 - 0x90000000
// Standard OMX errors   from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
        default:
            return isOMXError(omxError) ? omxError : 0; // no translation required
    }
}

// Converts a binder Status to a status_t: OK for success, otherwise the
// service-specific error, then the transaction error, then UNKNOWN_ERROR.
static inline status_t statusFromBinderStatus(const Status &status) {
    if (status.isOk()) {
        return OK;
    }
    status_t err;
    if ((err = status.serviceSpecificErrorCode()) != OK) {
        return err;
    }
    if ((err = status.transactionError()) != OK) {
        return err;
    }
    // Other exception
    return UNKNOWN_ERROR;
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Refcounted wrapper around a std::list of AMessages so a batch of messages
// can be attached to a single AMessage via setObject().
struct MessageList : public RefBase {
    MessageList() {
    }
    virtual ~MessageList() {
    }
    std::list<sp<AMessage> > &getList() { return mList; }
private:
    std::list<sp<AMessage> > mList;

    DISALLOW_EVIL_CONSTRUCTORS(MessageList);
};

// Lazily creates and returns a process-wide shared plain-copy DataConverter.
static sp<DataConverter> getCopyConverter() {
    static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited
    static sp<DataConverter> sCopyConverter;        // zero-inited
    pthread_once(&once, [](){ sCopyConverter = new DataConverter(); });
    return sCopyConverter;
}

// Receives IOMX callbacks and forwards them to ACodec: each batch of
// omx_messages is converted into AMessages, wrapped in a MessageList and
// posted as a single notification.
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessages(const std::list<omx_message> &messages) {
        if (messages.empty()) {
            return;
        }

        sp<AMessage> notify = mNotify->dup();
        sp<MessageList> msgList = new MessageList();
        for (std::list<omx_message>::const_iterator it = messages.cbegin();
              it != messages.cend(); ++it) {
            const omx_message &omx_msg = *it;

            sp<AMessage> msg = new AMessage;
            msg->setInt32("type", omx_msg.type);
            switch (omx_msg.type) {
                case omx_message::EVENT:
                {
                    msg->setInt32("event", omx_msg.u.event_data.event);
                    msg->setInt32("data1", omx_msg.u.event_data.data1);
                    msg->setInt32("data2", omx_msg.u.event_data.data2);
                    break;
                }

                case omx_message::EMPTY_BUFFER_DONE:
                {
                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                    msg->setInt32("fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FILL_BUFFER_DONE:
                {
                    msg->setInt32(
                            "buffer", omx_msg.u.extended_buffer_data.buffer);
                    msg->setInt32(
                            "range_offset",
                            omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for all states of the ACodec state machine; provides the shared
// OMX-message dispatch and buffer-handling hooks that concrete states override.
struct ACodec::BaseState : public AState {
    explicit BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts mNotify when the binder object it watches dies.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    explicit DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

// State before any OMX component has been allocated.
struct ACodec::UninitializedState : public ACodec::BaseState {
    explicit UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

// Component allocated (OMX "Loaded"); handles configure / input-surface
// creation / start / shutdown requests.
struct ACodec::LoadedState : public ACodec::BaseState {
    explicit LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state: buffers are being allocated on both ports while the
// component moves from Loaded to Idle.
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    explicit LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state between OMX Idle and Executing.
struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    explicit IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Steady state: component is executing and buffers flow on both ports.
struct ACodec::ExecutingState : public ACodec::BaseState {
    explicit ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

// Entered while the output port is reconfigured after a port-settings-changed
// event.
struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    explicit OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state during shutdown: waiting to reclaim all buffers before
// moving the component back to Idle.
struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    explicit ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

// Transitional state between OMX Idle and Loaded while buffers are freed.
struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    explicit IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

// Entered during flush; tracks per-port flush completion before resuming.
struct ACodec::FlushingState : public ACodec::BaseState {
    explicit FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Records a write fence on this buffer, warning if an unclosed fence is
// being overwritten (which would leak the old fence fd).
void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = false;
}

// Records a read fence on this buffer, warning if an unclosed fence is
// being overwritten.
void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
                mIsReadFence ?
                "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = true;
}

// Logs (debug only) when a pending read fence is about to be used as a write fence.
void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
    if (mFenceFd >= 0 && mIsReadFence) {
        ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
    }
}

// Logs (debug only) when a pending write fence is about to be used as a read fence.
void ACodec::BufferInfo::checkReadFence(const char *dbg) {
    if (mFenceFd >= 0 && !mIsReadFence) {
        ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
    }
}

////////////////////////////////////////////////////////////////////////////////

// All state objects are created up front; the codec starts in the
// Uninitialized state.
ACodec::ACodec()
    : mSampleRate(0),
      mNodeGeneration(0),
      mUsingNativeWindow(false),
      mNativeWindowUsageBits(0),
      mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
      mIsVideo(false),
      mIsEncoder(false),
      mFatalError(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mIsLegacyVP9Decoder(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mInputMetadataType(kMetadataBufferTypeInvalid),
      mOutputMetadataType(kMetadataBufferTypeInvalid),
      mLegacyAdaptiveExperiment(false),
      mMetadataBuffersToSubmit(0),
      mNumUndequeuedBuffers(0),
      mRepeatFrameDelayUs(-1ll),
      mMaxPtsGapUs(-1ll),
      mMaxFps(-1),
      mTimePerFrameUs(-1ll),
      mTimePerCaptureUs(-1ll),
      mCreateInputBuffersSuspended(false),
      mTunneled(false),
      mDescribeColorAspectsIndex((OMX_INDEXTYPE)0),
      mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) {
    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));

    changeState(mUninitializedState);
}

ACodec::~ACodec() {
}

void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}

// The initiate*/signal* methods below only post messages to the looper
// thread; all actual work happens in the current state's message handler.
void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(this);
    msg->post();
}

void ACodec::signalSetParameters(const sp<AMessage> &params) {
    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
    msg->setMessage("params", params);
    msg->post();
}

void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent);
    msg->setTarget(this);
    msg->post();
}

void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(this);
    msg->post();
}

// Synchronous: posts the request and waits for the handler's response,
// returning its "err" payload.
status_t ACodec::setSurface(const sp<Surface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
    msg->setObject("surface", surface);

    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err == OK) {
        (void)response->findInt32("err", &err);
    }
    return err;
}

void ACodec::initiateCreateInputSurface() {
    (new AMessage(kWhatCreateInputSurface, this))->post();
}

void ACodec::initiateSetInputSurface(
        const sp<PersistentSurface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
    msg->setObject("input-surface", surface);
    msg->post();
}

void ACodec::signalEndOfInputStream() {
    (new AMessage(kWhatSignalEndOfInputStream, this))->post();
}

void ACodec::initiateStart() {
    (new AMessage(kWhatStart, this))->post();
}

void ACodec::signalFlush() {
    ALOGV("[%s] signalFlush", mComponentName.c_str());
    (new AMessage(kWhatFlush, this))->post();
}

void ACodec::signalResume() {
    (new AMessage(kWhatResume, this))->post();
}

void ACodec::initiateShutdown(bool keepComponentAllocated) {
    sp<AMessage> msg = new AMessage(kWhatShutdown, this);
    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
    msg->post();
    if (!keepComponentAllocated) {
        // ensure shutdown completes in 3 seconds
        (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
    }
}

void ACodec::signalRequestIDRFrame() {
    (new AMessage(kWhatRequestIDRFrame, this))->post();
}

// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port.  For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetadataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
    }
}

// Switches the output to a new Surface while the codec may already be
// running. Validates the transition (no unset, no bytebuffer->surface, no
// tunneled switch, no extra usage bits, no higher undequeued-buffer need),
// then attaches existing graphic buffers to the new surface and cancels the
// undequeued ones back to it. Returns INVALID_OPERATION / BAD_VALUE on
// unsupported transitions.
status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
    // allow keeping unset surface
    if (surface == NULL) {
        if (mNativeWindow != NULL) {
            ALOGW("cannot unset a surface");
            return INVALID_OPERATION;
        }
        return OK;
    }

    // cannot switch from bytebuffers to surface
    if (mNativeWindow == NULL) {
        ALOGW("component was not configured with a surface");
        return INVALID_OPERATION;
    }

    ANativeWindow *nativeWindow = surface.get();
    // if we have not yet started the codec, we can simply set the native window
    if (mBuffers[kPortIndexInput].size() == 0) {
        mNativeWindow = surface;
        return OK;
    }

    // we do not support changing a tunneled surface after start
    if (mTunneled) {
        ALOGW("cannot change tunneled surface");
        return INVALID_OPERATION;
    }

    int usageBits = 0;
    // no need to reconnect as we will not dequeue all buffers
    status_t err = setupNativeWindowSizeFormatAndUsage(
            nativeWindow, &usageBits,
            !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */);
    if (err != OK) {
        return err;
    }

    int ignoredFlags = kVideoGrallocUsage;
    // New output surface is not allowed to add new usage flag except ignored ones.
    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
        return BAD_VALUE;
    }

    // get min undequeued count. We cannot switch to a surface that has a higher
    // undequeued count than we allocated.
    int minUndequeuedBuffers = 0;
    err = nativeWindow->query(
            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &minUndequeuedBuffers);
    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }
    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
                minUndequeuedBuffers, mNumUndequeuedBuffers);
        return BAD_VALUE;
    }

    // we cannot change the number of output buffers while OMX is running
    // set up surface to the same count
    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
    ALOGV("setting up surface for %zu buffers", buffers.size());

    err = native_window_set_buffer_count(nativeWindow, buffers.size());
    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    // need to enable allocation when attaching
    surface->getIGraphicBufferProducer()->allowAllocation(true);

    // for meta data mode, we move dequeued buffers to the new surface.
    // for non-meta mode, we must move all registered buffers
    for (size_t i = 0; i < buffers.size(); ++i) {
        const BufferInfo &info = buffers[i];
        // skip undequeued buffers for meta data mode
        if (storingMetadataInDecodedBuffers()
                && !mLegacyAdaptiveExperiment
                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("skipping buffer");
            continue;
        }
        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());

        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
        if (err != OK) {
            ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
                    info.mGraphicBuffer->getNativeBuffer(),
                    strerror(-err), -err);
            return err;
        }
    }

    // cancel undequeued buffers to new surface
    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            BufferInfo &info = buffers.editItemAt(i);
            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                err = nativeWindow->cancelBuffer(
                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
                info.mFenceFd = -1;
                if (err != OK) {
                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
                            info.mGraphicBuffer->getNativeBuffer(),
                            strerror(-err), -err);
                    return err;
                }
            }
        }
        // disallow further allocation
        (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // push blank buffers to previous window if requested
    if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
        pushBlankBuffersToNativeWindow(mNativeWindow.get());
    }

    mNativeWindow = nativeWindow;
    mNativeWindowUsageBits = usageBits;
    return OK;
}

// Allocates all buffers on the given port. Output buffers with a native
// window come from the window (metadata or graphic buffers); otherwise
// shared-memory (or secure) buffers are allocated via a MemoryDealer and
// registered with the OMX node. On success, notifies the client with
// kWhatBuffersAllocated and a PortDescription of every buffer.
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    CHECK(mDealer[portIndex] ==
NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (storingMetadataInDecodedBuffers()) {
            err = allocateOutputMetadataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMXNode->getParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            MetadataBufferType type =
                portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
            size_t bufSize = def.nBufferSize;
            if (type == kMetadataBufferTypeANWBuffer) {
                bufSize = sizeof(VideoNativeMetadata);
            } else if (type == kMetadataBufferTypeNativeHandleSource) {
                bufSize = sizeof(VideoNativeHandleMetadata);
            }

            // If using gralloc or native source input metadata buffers, allocate largest
            // metadata size as we prefer to generate native source metadata, but component
            // may require gralloc source. For camera source, allocate at least enough
            // size for native metadata buffers.
            size_t allottedSize = bufSize;
            if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) {
                bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
                bufSize = max(bufSize, sizeof(VideoNativeMetadata));
            }

            size_t conversionBufferSize = 0;

            sp<DataConverter> converter = mConverter[portIndex];
            if (converter != NULL) {
                // here we assume sane conversions of max 4:1, so result fits in int32
                if (portIndex == kPortIndexInput) {
                    conversionBufferSize = converter->sourceSize(bufSize);
                } else {
                    conversionBufferSize = converter->targetSize(bufSize);
                }
            }

            size_t alignment = MemoryDealer::getAllocationAlignment();

            ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
                    portIndex == kPortIndexInput ? "input" : "output");

            // verify buffer sizes to avoid overflow in align()
            if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            // don't modify bufSize as OMX may not expect it to increase after negotiation
            size_t alignedSize = align(bufSize, alignment);
            size_t alignedConvSize = align(conversionBufferSize, alignment);
            if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            const sp<AMessage> &format =
                    portIndex == kPortIndexInput ? mInputFormat : mOutputFormat;
            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
                if (mem == NULL || mem->pointer() == NULL) {
                    return NO_MEMORY;
                }

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;
                info.mFenceFd = -1;
                info.mRenderInfo = NULL;

                if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
                    // secure input: drop the shared memory and let the
                    // component allocate a secure buffer instead
                    mem.clear();

                    void *ptr = NULL;
                    sp<NativeHandle> native_handle;
                    err = mOMXNode->allocateSecureBuffer(
                            portIndex, bufSize, &info.mBufferID,
                            &ptr, &native_handle);

                    info.mData = (native_handle == NULL)
                            ? new SecureBuffer(format, ptr, bufSize)
                            : new SecureBuffer(format, native_handle, bufSize);
                    info.mCodecData = info.mData;
                } else {
                    err = mOMXNode->useBuffer(
                            portIndex, mem, &info.mBufferID, allottedSize);
                }

                if (mem != NULL) {
                    info.mCodecData = new SharedMemoryBuffer(format, mem);
                    info.mCodecRef = mem;

                    if (type == kMetadataBufferTypeANWBuffer) {
                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                    }

                    // if we require conversion, allocate conversion buffer for client use;
                    // otherwise, reuse codec buffer
                    if (mConverter[portIndex] != NULL) {
                        CHECK_GT(conversionBufferSize, (size_t)0);
                        mem = mDealer[portIndex]->allocate(conversionBufferSize);
                        if (mem == NULL|| mem->pointer() == NULL) {
                            return NO_MEMORY;
                        }
                        info.mData = new SharedMemoryBuffer(format, mem);
                        info.mMemRef = mem;
                    } else {
                        info.mData = info.mCodecData;
                        info.mMemRef = info.mCodecRef;
                    }
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    // tell the client about every buffer we now own on this port
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

// Configures the native window with the output port's size/format plus the
// usage bits reported by the component (protected/video usage added here),
// and reports the final usage back through *finalUsage.
status_t ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */,
        bool reconnect) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMXNode->getGraphicBufferUsage(kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage,
            reconnect);
}

// Negotiates the output buffer count between the component and the native
// window, returning the agreed counts/sizes through the out-parameters.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(
mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */); 1009 } 1010 if (err != OK) { 1011 mNativeWindowUsageBits = 0; 1012 return err; 1013 } 1014 1015 // Exits here for tunneled video playback codecs -- i.e. skips native window 1016 // buffer allocation step as this is managed by the tunneled OMX omponent 1017 // itself and explicitly sets def.nBufferCountActual to 0. 1018 if (mTunneled) { 1019 ALOGV("Tunneled Playback: skipping native window buffer allocation."); 1020 def.nBufferCountActual = 0; 1021 err = mOMXNode->setParameter( 1022 OMX_IndexParamPortDefinition, &def, sizeof(def)); 1023 1024 *minUndequeuedBuffers = 0; 1025 *bufferCount = 0; 1026 *bufferSize = 0; 1027 return err; 1028 } 1029 1030 *minUndequeuedBuffers = 0; 1031 err = mNativeWindow->query( 1032 mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, 1033 (int *)minUndequeuedBuffers); 1034 1035 if (err != 0) { 1036 ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)", 1037 strerror(-err), -err); 1038 return err; 1039 } 1040 1041 // FIXME: assume that surface is controlled by app (native window 1042 // returns the number for the case when surface is not controlled by app) 1043 // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported 1044 // For now, try to allocate 1 more buffer, but don't fail if unsuccessful 1045 1046 // Use conservative allocation while also trying to reduce starvation 1047 // 1048 // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the 1049 // minimum needed for the consumer to be able to work 1050 // 2. 
try to allocate two (2) additional buffers to reduce starvation from 1051 // the consumer 1052 // plus an extra buffer to account for incorrect minUndequeuedBufs 1053 for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) { 1054 OMX_U32 newBufferCount = 1055 def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers; 1056 def.nBufferCountActual = newBufferCount; 1057 err = mOMXNode->setParameter( 1058 OMX_IndexParamPortDefinition, &def, sizeof(def)); 1059 1060 if (err == OK) { 1061 *minUndequeuedBuffers += extraBuffers; 1062 break; 1063 } 1064 1065 ALOGW("[%s] setting nBufferCountActual to %u failed: %d", 1066 mComponentName.c_str(), newBufferCount, err); 1067 /* exit condition */ 1068 if (extraBuffers == 0) { 1069 return err; 1070 } 1071 } 1072 1073 err = native_window_set_buffer_count( 1074 mNativeWindow.get(), def.nBufferCountActual); 1075 1076 if (err != 0) { 1077 ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err), 1078 -err); 1079 return err; 1080 } 1081 1082 *bufferCount = def.nBufferCountActual; 1083 *bufferSize = def.nBufferSize; 1084 return err; 1085} 1086 1087status_t ACodec::allocateOutputBuffersFromNativeWindow() { 1088 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1089 status_t err = configureOutputBuffersFromNativeWindow( 1090 &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */); 1091 if (err != 0) 1092 return err; 1093 mNumUndequeuedBuffers = minUndequeuedBuffers; 1094 1095 if (!storingMetadataInDecodedBuffers()) { 1096 static_cast<Surface*>(mNativeWindow.get()) 1097 ->getIGraphicBufferProducer()->allowAllocation(true); 1098 } 1099 1100 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1101 "output port", 1102 mComponentName.c_str(), bufferCount, bufferSize); 1103 1104 // Dequeue buffers and send them to OMX 1105 for (OMX_U32 i = 0; i < bufferCount; i++) { 1106 ANativeWindowBuffer *buf; 1107 int fenceFd; 1108 err = 
mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        // wrap the ANativeWindowBuffer without taking ownership of the handle
        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = graphicBuffer;
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMXNode->useGraphicBuffer(
                kPortIndexOutput, graphicBuffer, &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // report the first cancel failure, but keep cancelling
            if (err == 0) {
                err = error;
            }
        }
    }

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}

// Allocates one small shared-memory metadata buffer per negotiated output
// buffer and registers each with the OMX component.  Under the legacy
// adaptive experiment the graphic buffers themselves are also pre-allocated
// and pre-registered by dequeueing and then cancelling them all.
status_t ACodec::allocateOutputMetadataBuffers() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers,
            mLegacyAdaptiveExperiment /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    ALOGV("[%s] Allocating %u meta buffers on output port",
            mComponentName.c_str(), bufferCount);

    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
        info.mFenceFd = -1;
        info.mRenderInfo = NULL;
        info.mGraphicBuffer = NULL;
        info.mDequeuedAt = mDequeueCounter;

        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
        if (mem == NULL || mem->pointer() == NULL) {
            return NO_MEMORY;
        }
        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
            // mark the embedded fence as "none" before the component sees it
            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
        }
        info.mData = new SharedMemoryBuffer(mOutputFormat, mem);
        info.mMemRef = mem;
        info.mCodecData = info.mData;
        info.mCodecRef = mem;

        err = mOMXNode->useBuffer(
                kPortIndexOutput, mem, &info.mBufferID, mem->size());
        mBuffers[kPortIndexOutput].push(info);

        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
                mComponentName.c_str(), info.mBufferID, mem->pointer());
    }

    if (mLegacyAdaptiveExperiment) {
        // preallocate and preregister buffers
        static_cast<Surface *>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);

        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
             "output port",
             mComponentName.c_str(), bufferCount, bufferSize);

        // Dequeue buffers then cancel them all
        for (OMX_U32 i = 0; i < bufferCount; i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            ANativeWindowBuffer *buf;
            int fenceFd;
            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
            if (err != 0) {
                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err),
                        -err);
                break;
            }

            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
            mOMXNode->updateGraphicBufferInMeta(
                    kPortIndexOutput, graphicBuffer, info->mBufferID);
            info->mStatus = BufferInfo::OWNED_BY_US;
            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
            info->mGraphicBuffer = graphicBuffer;
        }

        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
            if (info->mStatus == BufferInfo::OWNED_BY_US) {
                status_t error = cancelBufferToNativeWindow(info);
                // keep the first error, but keep cancelling the rest
                if (err == OK) {
                    err = error;
                }
            }
        }

        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
    return err;
}

// Dequeues one output buffer from the native window and submits it to the
// component via fillBuffer, decrementing the count of metadata buffers still
// to be handed out.  No-op once that count reaches zero.
status_t ACodec::submitOutputMetadataBuffer() {
    CHECK(storingMetadataInDecodedBuffers());
    if (mMetadataBuffersToSubmit == 0)
        return OK;

    BufferInfo *info = dequeueBufferFromNativeWindow();
    if (info == NULL) {
        return ERROR_IO;
    }

    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());

    --mMetadataBuffersToSubmit;
    info->checkWriteFence("submitOutputMetadataBuffer");
    // fillBuffer takes over the fence; forget our copy of the fd either way
    status_t err = mOMXNode->fillBuffer(info->mBufferID, info->mFenceFd);
    info->mFenceFd = -1;
    if (err == OK) {
        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    return err;
}

// Synchronously waits (up to IOMX::kFenceTimeoutMs) for the fence |fd| to
// signal; a negative fd means "no fence" and returns OK immediately.
// |dbg| is only used to tag the timeout warning.
status_t ACodec::waitForFence(int fd, const char *dbg ) {
    status_t res = OK;
    if (fd >= 0) {
        sp<Fence> fence = new Fence(fd);
        res = fence->wait(IOMX::kFenceTimeoutMs);
        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
    }
    return res;
}

// static
// Human-readable name of a BufferInfo ownership state, for logging.
const char
*ACodec::_asString(BufferInfo::Status s) {
    switch (s) {
        case BufferInfo::OWNED_BY_US: return "OUR";
        case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT";
        case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM";
        case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM";
        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
        case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED";
        default: return "?";
    }
}

// Logs ID, graphic buffer pointers, ownership state and dequeue generation
// of every buffer on the given port.
void ACodec::dumpBuffers(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
                i, info.mBufferID, info.mGraphicBuffer.get(),
                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
    }
}

// Returns a buffer we currently own back to the native window via
// cancelBuffer, passing along any pending write fence.  Ownership is handed
// to the window even when cancelBuffer fails.
status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    ALOGV("[%s] Calling cancelBuffer on buffer %u",
         mComponentName.c_str(), info->mBufferID);

    info->checkWriteFence("cancelBufferToNativeWindow");
    // cancelBuffer takes over the fence fd; drop our copy unconditionally
    int err = mNativeWindow->cancelBuffer(
        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
    info->mFenceFd = -1;

    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
            mComponentName.c_str(), info->mBufferID);
    // change ownership even if cancelBuffer fails
    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;

    return err;
}

// Registers the freshly dequeued buffer with the render tracker (keyed by its
// slot index) and immediately reports any frames whose fences have already
// signaled.
void ACodec::updateRenderInfoForDequeuedBuffer(
        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {

    info->mRenderInfo =
        mRenderTracker.updateInfoForDequeuedBuffer(
                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);

    // check for any fences already signaled
    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
}

// Feeds a frame-rendered event into the tracker; dumps the render queue when
// the tracker reports an error.
void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
        mRenderTracker.dumpRenderQueue();
    }
}

// Collects frames whose render fences have signaled (up to |until|, if set),
// unlinks them from their BufferInfo slots, and posts a
// kWhatOutputFramesRendered notification when there is anything to report.
void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
    sp<AMessage> msg = mNotify->dup();
    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
    std::list<FrameRenderTracker::Info> done =
        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);

    // unlink untracked frames
    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
            it != done.cend(); ++it) {
        ssize_t index = it->getIndex();
        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
        } else if (index >= 0) {
            // THIS SHOULD NEVER HAPPEN
            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
        }
    }

    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
        msg->post();
    }
}

// Dequeues a buffer from the native window and matches it against the known
// output buffers by handle.  Stale/unrecognized buffers are discarded and the
// dequeue is retried; in metadata mode an unknown buffer replaces the oldest
// buffer currently owned by the window.  Returns NULL on error, in tunneled
// mode, or after a fatal error.
ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
    ANativeWindowBuffer *buf;
    CHECK(mNativeWindow.get() != NULL);

    if (mTunneled) {
        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
              " video playback mode mode!");
        return NULL;
    }

    if (mFatalError) {
        ALOGW("not dequeuing from native window due to fatal error");
        return NULL;
    }

    int fenceFd = -1;
    do {
        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s(%d).", asString(err),
err);
            return NULL;
        }

        bool stale = false;
        // scan newest-to-oldest for a registered buffer with the same handle
        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
            i--;
            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);

            if (info->mGraphicBuffer != NULL &&
                    info->mGraphicBuffer->handle == buf->handle) {
                // Since consumers can attach buffers to BufferQueues, it is possible
                // that a known yet stale buffer can return from a surface that we
                // once used.  We can simply ignore this as we have already dequeued
                // this buffer properly.  NOTE: this does not eliminate all cases,
                // e.g. it is possible that we have queued the valid buffer to the
                // NW, and a stale copy of the same buffer gets dequeued - which will
                // be treated as the valid buffer by ACodec.
                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                    ALOGI("dequeued stale buffer %p. discarding", buf);
                    stale = true;
                    break;
                }

                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
                info->mStatus = BufferInfo::OWNED_BY_US;
                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
                return info;
            }
        }

        // It is also possible to receive a previously unregistered buffer
        // in non-meta mode. These should be treated as stale buffers. The
        // same is possible in meta mode, in which case, it will be treated
        // as a normal buffer, which is not desirable.
        // TODO: fix this.
        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
            stale = true;
        }
        if (stale) {
            // TODO: detach stale buffer, but there is no API yet to do it.
            buf = NULL;
        }
    } while (buf == NULL);

    // get oldest undequeued buffer
    BufferInfo *oldest = NULL;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
            (oldest == NULL ||
             // avoid potential issues from counter rolling over
             mDequeueCounter - info->mDequeuedAt >
                    mDequeueCounter - oldest->mDequeuedAt)) {
            oldest = info;
        }
    }

    // it is impossible dequeue a buffer when there are no buffers with ANW
    CHECK(oldest != NULL);
    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
    // while loop above does not complete
    CHECK(storingMetadataInDecodedBuffers());

    // discard buffer in LRU info and replace with new buffer
    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
    oldest->mStatus = BufferInfo::OWNED_BY_US;
    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
    mRenderTracker.untrackFrame(oldest->mRenderInfo);
    oldest->mRenderInfo = NULL;

    mOMXNode->updateGraphicBufferInMeta(
            kPortIndexOutput, oldest->mGraphicBuffer, oldest->mBufferID);

    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
        VideoGrallocMetadata *grallocMeta =
            reinterpret_cast<VideoGrallocMetadata *>(oldest->mCodecData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)grallocMeta->pHandle,
                oldest->mGraphicBuffer->handle, oldest->mCodecData->base());
    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
        VideoNativeMetadata *nativeMeta =
            reinterpret_cast<VideoNativeMetadata *>(oldest->mCodecData->base());
        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
                mDequeueCounter - oldest->mDequeuedAt,
                (void *)(uintptr_t)nativeMeta->pBuffer,
                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mCodecData->base());
    }

    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
    return oldest;
}

// Frees every buffer on the given port, reporting the first failure but
// continuing through the rest.  The port's MemoryDealer is cleared even on
// error.
status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
    status_t err = OK;
    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
        i--;
        status_t err2 = freeBuffer(portIndex, i);
        if (err == OK) {
            err = err2;
        }
    }

    // clear mDealer even on an error
    mDealer[portIndex].clear();
    return err;
}

// Frees all output buffers except those still held by the component or being
// drained downstream; reports the first failure but frees the rest anyway.
status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
    status_t err = OK;
    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
        i--;
        BufferInfo *info =
            &mBuffers[kPortIndexOutput].editItemAt(i);

        // At this time some buffers may still be with the component
        // or being drained.
        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
                info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
            status_t err2 = freeBuffer(kPortIndexOutput, i);
            if (err == OK) {
                err = err2;
            }
        }
    }

    return err;
}

// Frees the buffer at slot |i| on |portIndex|: cancels it to the native
// window first if we own it, releases it from the OMX component, closes any
// pending fence, untracks its render info, and removes the slot entry even
// if the component-side free fails.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mCodecData != NULL
            && info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mCodecData->base())->nFenceFd;
        if (fenceFd >= 0) {
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ?
"input" : "output", i); 1551 } 1552 } 1553 1554 switch (info->mStatus) { 1555 case BufferInfo::OWNED_BY_US: 1556 if (portIndex == kPortIndexOutput && mNativeWindow != NULL) { 1557 (void)cancelBufferToNativeWindow(info); 1558 } 1559 // fall through 1560 1561 case BufferInfo::OWNED_BY_NATIVE_WINDOW: 1562 err = mOMXNode->freeBuffer(portIndex, info->mBufferID); 1563 break; 1564 1565 default: 1566 ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus); 1567 err = FAILED_TRANSACTION; 1568 break; 1569 } 1570 1571 if (info->mFenceFd >= 0) { 1572 ::close(info->mFenceFd); 1573 } 1574 1575 if (portIndex == kPortIndexOutput) { 1576 mRenderTracker.untrackFrame(info->mRenderInfo, i); 1577 info->mRenderInfo = NULL; 1578 } 1579 1580 // remove buffer even if mOMXNode->freeBuffer fails 1581 mBuffers[portIndex].removeAt(i); 1582 return err; 1583} 1584 1585ACodec::BufferInfo *ACodec::findBufferByID( 1586 uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) { 1587 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1588 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1589 1590 if (info->mBufferID == bufferID) { 1591 if (index != NULL) { 1592 *index = i; 1593 } 1594 return info; 1595 } 1596 } 1597 1598 ALOGE("Could not find buffer with ID %u", bufferID); 1599 return NULL; 1600} 1601 1602status_t ACodec::setComponentRole( 1603 bool isEncoder, const char *mime) { 1604 const char *role = GetComponentRole(isEncoder, mime); 1605 if (role == NULL) { 1606 return BAD_VALUE; 1607 } 1608 status_t err = SetComponentRole(mOMXNode, role); 1609 if (err != OK) { 1610 ALOGW("[%s] Failed to set standard component role '%s'.", 1611 mComponentName.c_str(), role); 1612 } 1613 return err; 1614} 1615 1616status_t ACodec::configureCodec( 1617 const char *mime, const sp<AMessage> &msg) { 1618 int32_t encoder; 1619 if (!msg->findInt32("encoder", &encoder)) { 1620 encoder = false; 1621 } 1622 1623 sp<AMessage> inputFormat = new AMessage; 1624 sp<AMessage> outputFormat = 
new AMessage; 1625 mConfigFormat = msg; 1626 1627 mIsEncoder = encoder; 1628 1629 mInputMetadataType = kMetadataBufferTypeInvalid; 1630 mOutputMetadataType = kMetadataBufferTypeInvalid; 1631 1632 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1633 1634 if (err != OK) { 1635 return err; 1636 } 1637 1638 int32_t bitRate = 0; 1639 // FLAC encoder doesn't need a bitrate, other encoders do 1640 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1641 && !msg->findInt32("bitrate", &bitRate)) { 1642 return INVALID_OPERATION; 1643 } 1644 1645 // propagate bitrate to the output so that the muxer has it 1646 if (encoder && msg->findInt32("bitrate", &bitRate)) { 1647 // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the 1648 // average bitrate. We've been setting both bitrate and max-bitrate to this same value. 1649 outputFormat->setInt32("bitrate", bitRate); 1650 outputFormat->setInt32("max-bitrate", bitRate); 1651 } 1652 1653 int32_t storeMeta; 1654 if (encoder 1655 && msg->findInt32("android._input-metadata-buffer-type", &storeMeta) 1656 && storeMeta != kMetadataBufferTypeInvalid) { 1657 mInputMetadataType = (MetadataBufferType)storeMeta; 1658 err = mOMXNode->storeMetaDataInBuffers( 1659 kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1660 if (err != OK) { 1661 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1662 mComponentName.c_str(), err); 1663 1664 return err; 1665 } else if (storeMeta == kMetadataBufferTypeANWBuffer 1666 && mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1667 // IOMX translates ANWBuffers to gralloc source already. 
1668 mInputMetadataType = (MetadataBufferType)storeMeta; 1669 } 1670 1671 uint32_t usageBits; 1672 if (mOMXNode->getParameter( 1673 (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1674 &usageBits, sizeof(usageBits)) == OK) { 1675 inputFormat->setInt32( 1676 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1677 } 1678 } 1679 1680 int32_t prependSPSPPS = 0; 1681 if (encoder 1682 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1683 && prependSPSPPS != 0) { 1684 OMX_INDEXTYPE index; 1685 err = mOMXNode->getExtensionIndex( 1686 "OMX.google.android.index.prependSPSPPSToIDRFrames", &index); 1687 1688 if (err == OK) { 1689 PrependSPSPPSToIDRFramesParams params; 1690 InitOMXParams(¶ms); 1691 params.bEnable = OMX_TRUE; 1692 1693 err = mOMXNode->setParameter(index, ¶ms, sizeof(params)); 1694 } 1695 1696 if (err != OK) { 1697 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1698 "IDR frames. (err %d)", err); 1699 1700 return err; 1701 } 1702 } 1703 1704 // Only enable metadata mode on encoder output if encoder can prepend 1705 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1706 // opaque handle, to which we don't have access. 1707 int32_t video = !strncasecmp(mime, "video/", 6); 1708 mIsVideo = video; 1709 if (encoder && video) { 1710 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1711 && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta) 1712 && storeMeta != 0); 1713 1714 mOutputMetadataType = kMetadataBufferTypeNativeHandleSource; 1715 err = mOMXNode->storeMetaDataInBuffers(kPortIndexOutput, enable, &mOutputMetadataType); 1716 if (err != OK) { 1717 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1718 mComponentName.c_str(), err); 1719 } 1720 1721 if (!msg->findInt64( 1722 "repeat-previous-frame-after", 1723 &mRepeatFrameDelayUs)) { 1724 mRepeatFrameDelayUs = -1ll; 1725 } 1726 1727 // only allow 32-bit value, since we pass it as U32 to OMX. 
1728 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1729 mMaxPtsGapUs = -1ll; 1730 } else if (mMaxPtsGapUs > INT32_MAX || mMaxPtsGapUs < 0) { 1731 ALOGW("Unsupported value for max pts gap %lld", (long long) mMaxPtsGapUs); 1732 mMaxPtsGapUs = -1ll; 1733 } 1734 1735 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1736 mMaxFps = -1; 1737 } 1738 1739 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1740 mTimePerCaptureUs = -1ll; 1741 } 1742 1743 if (!msg->findInt32( 1744 "create-input-buffers-suspended", 1745 (int32_t*)&mCreateInputBuffersSuspended)) { 1746 mCreateInputBuffersSuspended = false; 1747 } 1748 } 1749 1750 // NOTE: we only use native window for video decoders 1751 sp<RefBase> obj; 1752 bool haveNativeWindow = msg->findObject("native-window", &obj) 1753 && obj != NULL && video && !encoder; 1754 mUsingNativeWindow = haveNativeWindow; 1755 mLegacyAdaptiveExperiment = false; 1756 if (video && !encoder) { 1757 inputFormat->setInt32("adaptive-playback", false); 1758 1759 int32_t usageProtected; 1760 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1761 if (!haveNativeWindow) { 1762 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1763 return PERMISSION_DENIED; 1764 } 1765 mFlags |= kFlagIsGrallocUsageProtected; 1766 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1767 } 1768 1769 if (mFlags & kFlagIsSecure) { 1770 // use native_handles for secure input buffers 1771 err = mOMXNode->enableNativeBuffers( 1772 kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE); 1773 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1774 err = OK; // ignore error for now 1775 } 1776 } 1777 if (haveNativeWindow) { 1778 sp<ANativeWindow> nativeWindow = 1779 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1780 1781 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1782 int32_t autoFrc; 1783 if (msg->findInt32("auto-frc", &autoFrc)) { 1784 bool enabled = 
autoFrc; 1785 OMX_CONFIG_BOOLEANTYPE config; 1786 InitOMXParams(&config); 1787 config.bEnabled = (OMX_BOOL)enabled; 1788 status_t temp = mOMXNode->setConfig( 1789 (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1790 &config, sizeof(config)); 1791 if (temp == OK) { 1792 outputFormat->setInt32("auto-frc", enabled); 1793 } else if (enabled) { 1794 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1795 } 1796 } 1797 // END of temporary support for automatic FRC 1798 1799 int32_t tunneled; 1800 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1801 tunneled != 0) { 1802 ALOGI("Configuring TUNNELED video playback."); 1803 mTunneled = true; 1804 1805 int32_t audioHwSync = 0; 1806 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1807 ALOGW("No Audio HW Sync provided for video tunnel"); 1808 } 1809 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1810 if (err != OK) { 1811 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1812 audioHwSync, nativeWindow.get()); 1813 return err; 1814 } 1815 1816 int32_t maxWidth = 0, maxHeight = 0; 1817 if (msg->findInt32("max-width", &maxWidth) && 1818 msg->findInt32("max-height", &maxHeight)) { 1819 1820 err = mOMXNode->prepareForAdaptivePlayback( 1821 kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1822 if (err != OK) { 1823 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1824 mComponentName.c_str(), err); 1825 // allow failure 1826 err = OK; 1827 } else { 1828 inputFormat->setInt32("max-width", maxWidth); 1829 inputFormat->setInt32("max-height", maxHeight); 1830 inputFormat->setInt32("adaptive-playback", true); 1831 } 1832 } 1833 } else { 1834 ALOGV("Configuring CPU controlled video playback."); 1835 mTunneled = false; 1836 1837 // Explicity reset the sideband handle of the window for 1838 // non-tunneled video in case the window was previously used 1839 // for a tunneled video playback. 
1840 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1841 if (err != OK) { 1842 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1843 return err; 1844 } 1845 1846 // Always try to enable dynamic output buffers on native surface 1847 mOutputMetadataType = kMetadataBufferTypeANWBuffer; 1848 err = mOMXNode->storeMetaDataInBuffers( 1849 kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1850 if (err != OK) { 1851 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1852 mComponentName.c_str(), err); 1853 1854 // if adaptive playback has been requested, try JB fallback 1855 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1856 // LARGE MEMORY REQUIREMENT 1857 1858 // we will not do adaptive playback on software accessed 1859 // surfaces as they never had to respond to changes in the 1860 // crop window, and we don't trust that they will be able to. 1861 int usageBits = 0; 1862 bool canDoAdaptivePlayback; 1863 1864 if (nativeWindow->query( 1865 nativeWindow.get(), 1866 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1867 &usageBits) != OK) { 1868 canDoAdaptivePlayback = false; 1869 } else { 1870 canDoAdaptivePlayback = 1871 (usageBits & 1872 (GRALLOC_USAGE_SW_READ_MASK | 1873 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1874 } 1875 1876 int32_t maxWidth = 0, maxHeight = 0; 1877 if (canDoAdaptivePlayback && 1878 msg->findInt32("max-width", &maxWidth) && 1879 msg->findInt32("max-height", &maxHeight)) { 1880 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1881 mComponentName.c_str(), maxWidth, maxHeight); 1882 1883 err = mOMXNode->prepareForAdaptivePlayback( 1884 kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1885 ALOGW_IF(err != OK, 1886 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1887 mComponentName.c_str(), err); 1888 1889 if (err == OK) { 1890 inputFormat->setInt32("max-width", maxWidth); 1891 inputFormat->setInt32("max-height", maxHeight); 1892 inputFormat->setInt32("adaptive-playback", true); 1893 } 1894 } 1895 // allow failure 1896 err = 
OK; 1897 } else { 1898 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1899 mComponentName.c_str()); 1900 CHECK(storingMetadataInDecodedBuffers()); 1901 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1902 "legacy-adaptive", !msg->contains("no-experiments")); 1903 1904 inputFormat->setInt32("adaptive-playback", true); 1905 } 1906 1907 int32_t push; 1908 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1909 && push != 0) { 1910 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1911 } 1912 } 1913 1914 int32_t rotationDegrees; 1915 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1916 mRotationDegrees = rotationDegrees; 1917 } else { 1918 mRotationDegrees = 0; 1919 } 1920 } 1921 1922 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 1923 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 1924 // invalid encodings will default to PCM-16bit in setupRawAudioFormat. 1925 1926 if (video) { 1927 // determine need for software renderer 1928 bool usingSwRenderer = false; 1929 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1930 usingSwRenderer = true; 1931 haveNativeWindow = false; 1932 } 1933 1934 if (encoder) { 1935 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 1936 } else { 1937 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 1938 } 1939 1940 if (err != OK) { 1941 return err; 1942 } 1943 1944 if (haveNativeWindow) { 1945 mNativeWindow = static_cast<Surface *>(obj.get()); 1946 } 1947 1948 // initialize native window now to get actual output format 1949 // TODO: this is needed for some encoders even though they don't use native window 1950 err = initNativeWindow(); 1951 if (err != OK) { 1952 return err; 1953 } 1954 1955 // fallback for devices that do not handle flex-YUV for native buffers 1956 if (haveNativeWindow) { 1957 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 1958 if (msg->findInt32("color-format", &requestedColorFormat) && 1959 
requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 1960 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 1961 if (err != OK) { 1962 return err; 1963 } 1964 int32_t colorFormat = OMX_COLOR_FormatUnused; 1965 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 1966 if (!outputFormat->findInt32("color-format", &colorFormat)) { 1967 ALOGE("ouptut port did not have a color format (wrong domain?)"); 1968 return BAD_VALUE; 1969 } 1970 ALOGD("[%s] Requested output format %#x and got %#x.", 1971 mComponentName.c_str(), requestedColorFormat, colorFormat); 1972 if (!IsFlexibleColorFormat( 1973 mOMXNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 1974 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 1975 // device did not handle flex-YUV request for native window, fall back 1976 // to SW renderer 1977 ALOGI("[%s] Falling back to software renderer", mComponentName.c_str()); 1978 mNativeWindow.clear(); 1979 mNativeWindowUsageBits = 0; 1980 haveNativeWindow = false; 1981 usingSwRenderer = true; 1982 if (storingMetadataInDecodedBuffers()) { 1983 err = mOMXNode->storeMetaDataInBuffers( 1984 kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 1985 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 1986 // TODO: implement adaptive-playback support for bytebuffer mode. 1987 // This is done by SW codecs, but most HW codecs don't support it. 
1988 inputFormat->setInt32("adaptive-playback", false); 1989 } 1990 if (err == OK) { 1991 err = mOMXNode->enableNativeBuffers( 1992 kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 1993 } 1994 if (mFlags & kFlagIsGrallocUsageProtected) { 1995 // fallback is not supported for protected playback 1996 err = PERMISSION_DENIED; 1997 } else if (err == OK) { 1998 err = setupVideoDecoder( 1999 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2000 } 2001 } 2002 } 2003 } 2004 2005 if (usingSwRenderer) { 2006 outputFormat->setInt32("using-sw-renderer", 1); 2007 } 2008 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2009 int32_t numChannels, sampleRate; 2010 if (!msg->findInt32("channel-count", &numChannels) 2011 || !msg->findInt32("sample-rate", &sampleRate)) { 2012 // Since we did not always check for these, leave them optional 2013 // and have the decoder figure it all out. 2014 err = OK; 2015 } else { 2016 err = setupRawAudioFormat( 2017 encoder ? kPortIndexInput : kPortIndexOutput, 2018 sampleRate, 2019 numChannels); 2020 } 2021 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2022 int32_t numChannels, sampleRate; 2023 if (!msg->findInt32("channel-count", &numChannels) 2024 || !msg->findInt32("sample-rate", &sampleRate)) { 2025 err = INVALID_OPERATION; 2026 } else { 2027 int32_t isADTS, aacProfile; 2028 int32_t sbrMode; 2029 int32_t maxOutputChannelCount; 2030 int32_t pcmLimiterEnable; 2031 drcParams_t drc; 2032 if (!msg->findInt32("is-adts", &isADTS)) { 2033 isADTS = 0; 2034 } 2035 if (!msg->findInt32("aac-profile", &aacProfile)) { 2036 aacProfile = OMX_AUDIO_AACObjectNull; 2037 } 2038 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2039 sbrMode = -1; 2040 } 2041 2042 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2043 maxOutputChannelCount = -1; 2044 } 2045 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2046 // value is unknown 2047 pcmLimiterEnable = -1; 2048 } 2049 if 
(!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2050 // value is unknown 2051 drc.encodedTargetLevel = -1; 2052 } 2053 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2054 // value is unknown 2055 drc.drcCut = -1; 2056 } 2057 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2058 // value is unknown 2059 drc.drcBoost = -1; 2060 } 2061 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2062 // value is unknown 2063 drc.heavyCompression = -1; 2064 } 2065 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2066 // value is unknown 2067 drc.targetRefLevel = -1; 2068 } 2069 2070 err = setupAACCodec( 2071 encoder, numChannels, sampleRate, bitRate, aacProfile, 2072 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2073 pcmLimiterEnable); 2074 } 2075 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2076 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2077 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2078 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2079 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2080 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2081 // These are PCM-like formats with a fixed sample rate but 2082 // a variable number of channels. 
2083 2084 int32_t numChannels; 2085 if (!msg->findInt32("channel-count", &numChannels)) { 2086 err = INVALID_OPERATION; 2087 } else { 2088 int32_t sampleRate; 2089 if (!msg->findInt32("sample-rate", &sampleRate)) { 2090 sampleRate = 8000; 2091 } 2092 err = setupG711Codec(encoder, sampleRate, numChannels); 2093 } 2094 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2095 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2096 if (encoder && 2097 (!msg->findInt32("channel-count", &numChannels) 2098 || !msg->findInt32("sample-rate", &sampleRate))) { 2099 ALOGE("missing channel count or sample rate for FLAC encoder"); 2100 err = INVALID_OPERATION; 2101 } else { 2102 if (encoder) { 2103 if (!msg->findInt32( 2104 "complexity", &compressionLevel) && 2105 !msg->findInt32( 2106 "flac-compression-level", &compressionLevel)) { 2107 compressionLevel = 5; // default FLAC compression level 2108 } else if (compressionLevel < 0) { 2109 ALOGW("compression level %d outside [0..8] range, " 2110 "using 0", 2111 compressionLevel); 2112 compressionLevel = 0; 2113 } else if (compressionLevel > 8) { 2114 ALOGW("compression level %d outside [0..8] range, " 2115 "using 8", 2116 compressionLevel); 2117 compressionLevel = 8; 2118 } 2119 } 2120 err = setupFlacCodec( 2121 encoder, numChannels, sampleRate, compressionLevel); 2122 } 2123 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2124 int32_t numChannels, sampleRate; 2125 if (encoder 2126 || !msg->findInt32("channel-count", &numChannels) 2127 || !msg->findInt32("sample-rate", &sampleRate)) { 2128 err = INVALID_OPERATION; 2129 } else { 2130 err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2131 } 2132 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2133 int32_t numChannels; 2134 int32_t sampleRate; 2135 if (!msg->findInt32("channel-count", &numChannels) 2136 || !msg->findInt32("sample-rate", &sampleRate)) { 2137 err = INVALID_OPERATION; 2138 } else { 2139 err = 
setupAC3Codec(encoder, numChannels, sampleRate); 2140 } 2141 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2142 int32_t numChannels; 2143 int32_t sampleRate; 2144 if (!msg->findInt32("channel-count", &numChannels) 2145 || !msg->findInt32("sample-rate", &sampleRate)) { 2146 err = INVALID_OPERATION; 2147 } else { 2148 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2149 } 2150 } 2151 2152 if (err != OK) { 2153 return err; 2154 } 2155 2156 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2157 mEncoderDelay = 0; 2158 } 2159 2160 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2161 mEncoderPadding = 0; 2162 } 2163 2164 if (msg->findInt32("channel-mask", &mChannelMask)) { 2165 mChannelMaskPresent = true; 2166 } else { 2167 mChannelMaskPresent = false; 2168 } 2169 2170 int32_t maxInputSize; 2171 if (msg->findInt32("max-input-size", &maxInputSize)) { 2172 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2173 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2174 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2175 } 2176 2177 int32_t priority; 2178 if (msg->findInt32("priority", &priority)) { 2179 err = setPriority(priority); 2180 } 2181 2182 int32_t rateInt = -1; 2183 float rateFloat = -1; 2184 if (!msg->findFloat("operating-rate", &rateFloat)) { 2185 msg->findInt32("operating-rate", &rateInt); 2186 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 2187 } 2188 if (rateFloat > 0) { 2189 err = setOperatingRate(rateFloat, video); 2190 } 2191 2192 // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame. 
2193 mBaseOutputFormat = outputFormat; 2194 mLastOutputFormat.clear(); 2195 2196 err = getPortFormat(kPortIndexInput, inputFormat); 2197 if (err == OK) { 2198 err = getPortFormat(kPortIndexOutput, outputFormat); 2199 if (err == OK) { 2200 mInputFormat = inputFormat; 2201 mOutputFormat = outputFormat; 2202 } 2203 } 2204 2205 // create data converters if needed 2206 if (!video && err == OK) { 2207 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 2208 if (encoder) { 2209 (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2210 mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding); 2211 if (mConverter[kPortIndexInput] != NULL) { 2212 mInputFormat->setInt32("pcm-encoding", pcmEncoding); 2213 } 2214 } else { 2215 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding); 2216 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 2217 if (mConverter[kPortIndexOutput] != NULL) { 2218 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 2219 } 2220 } 2221 } 2222 2223 return err; 2224} 2225 2226status_t ACodec::setPriority(int32_t priority) { 2227 if (priority < 0) { 2228 return BAD_VALUE; 2229 } 2230 OMX_PARAM_U32TYPE config; 2231 InitOMXParams(&config); 2232 config.nU32 = (OMX_U32)priority; 2233 status_t temp = mOMXNode->setConfig( 2234 (OMX_INDEXTYPE)OMX_IndexConfigPriority, 2235 &config, sizeof(config)); 2236 if (temp != OK) { 2237 ALOGI("codec does not support config priority (err %d)", temp); 2238 } 2239 return OK; 2240} 2241 2242status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) { 2243 if (rateFloat < 0) { 2244 return BAD_VALUE; 2245 } 2246 OMX_U32 rate; 2247 if (isVideo) { 2248 if (rateFloat > 65535) { 2249 return BAD_VALUE; 2250 } 2251 rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f); 2252 } else { 2253 if (rateFloat > UINT_MAX) { 2254 return BAD_VALUE; 2255 } 2256 rate = (OMX_U32)(rateFloat); 2257 } 2258 OMX_PARAM_U32TYPE config; 2259 
InitOMXParams(&config); 2260 config.nU32 = rate; 2261 status_t err = mOMXNode->setConfig( 2262 (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate, 2263 &config, sizeof(config)); 2264 if (err != OK) { 2265 ALOGI("codec does not support config operating rate (err %d)", err); 2266 } 2267 return OK; 2268} 2269 2270status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) { 2271 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2272 InitOMXParams(¶ms); 2273 params.nPortIndex = kPortIndexOutput; 2274 status_t err = mOMXNode->getConfig( 2275 (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2276 if (err == OK) { 2277 *intraRefreshPeriod = params.nRefreshPeriod; 2278 return OK; 2279 } 2280 2281 // Fallback to query through standard OMX index. 2282 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2283 InitOMXParams(&refreshParams); 2284 refreshParams.nPortIndex = kPortIndexOutput; 2285 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2286 err = mOMXNode->getParameter( 2287 OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams)); 2288 if (err != OK || refreshParams.nCirMBs == 0) { 2289 *intraRefreshPeriod = 0; 2290 return OK; 2291 } 2292 2293 // Calculate period based on width and height 2294 uint32_t width, height; 2295 OMX_PARAM_PORTDEFINITIONTYPE def; 2296 InitOMXParams(&def); 2297 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2298 def.nPortIndex = kPortIndexOutput; 2299 err = mOMXNode->getParameter( 2300 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2301 if (err != OK) { 2302 *intraRefreshPeriod = 0; 2303 return err; 2304 } 2305 width = video_def->nFrameWidth; 2306 height = video_def->nFrameHeight; 2307 // Use H.264/AVC MacroBlock size 16x16 2308 *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs); 2309 2310 return OK; 2311} 2312 2313status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) { 2314 
OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 2315 InitOMXParams(¶ms); 2316 params.nPortIndex = kPortIndexOutput; 2317 params.nRefreshPeriod = intraRefreshPeriod; 2318 status_t err = mOMXNode->setConfig( 2319 (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, ¶ms, sizeof(params)); 2320 if (err == OK) { 2321 return OK; 2322 } 2323 2324 // Only in configure state, a component could invoke setParameter. 2325 if (!inConfigure) { 2326 return INVALID_OPERATION; 2327 } else { 2328 ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str()); 2329 } 2330 2331 OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams; 2332 InitOMXParams(&refreshParams); 2333 refreshParams.nPortIndex = kPortIndexOutput; 2334 refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic; 2335 2336 if (intraRefreshPeriod == 0) { 2337 // 0 means disable intra refresh. 2338 refreshParams.nCirMBs = 0; 2339 } else { 2340 // Calculate macroblocks that need to be intra coded base on width and height 2341 uint32_t width, height; 2342 OMX_PARAM_PORTDEFINITIONTYPE def; 2343 InitOMXParams(&def); 2344 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 2345 def.nPortIndex = kPortIndexOutput; 2346 err = mOMXNode->getParameter( 2347 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2348 if (err != OK) { 2349 return err; 2350 } 2351 width = video_def->nFrameWidth; 2352 height = video_def->nFrameHeight; 2353 // Use H.264/AVC MacroBlock size 16x16 2354 refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod); 2355 } 2356 2357 err = mOMXNode->setParameter( 2358 OMX_IndexParamVideoIntraRefresh, 2359 &refreshParams, sizeof(refreshParams)); 2360 if (err != OK) { 2361 return err; 2362 } 2363 2364 return OK; 2365} 2366 2367status_t ACodec::configureTemporalLayers( 2368 const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) { 2369 if (!mIsVideo || !mIsEncoder) { 2370 return INVALID_OPERATION; 2371 } 2372 2373 AString tsSchema; 2374 if 
// Applies an encoder temporal-layering scheme described by the "ts-schema"
// format key. Two syntaxes are recognized:
//   "webrtc.vp8.<N>-layer"        -> WebRTC VP8 pattern, N > 0 total layers
//   "android.generic.<P>"         -> Android pattern, P P-layers
//   "android.generic.<P>+<B>"     -> Android pattern, P P-layers + B B-layers
// Absence of the key is not an error (returns OK, nothing configured).
// During configure() the layering is set via setParameter; afterwards only
// setConfig is attempted. On success the effective schema is read back and
// published into |outputFormat| (duplicating mOutputFormat if needed so an
// output-format-change event fires).
status_t ACodec::configureTemporalLayers(
        const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) {
    // Temporal layering only makes sense for video encoders.
    if (!mIsVideo || !mIsEncoder) {
        return INVALID_OPERATION;
    }

    AString tsSchema;
    if (!msg->findString("ts-schema", &tsSchema)) {
        return OK;
    }

    unsigned int numLayers = 0;
    unsigned int numBLayers = 0;
    int tags;
    char dummy;
    OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern =
        OMX_VIDEO_AndroidTemporalLayeringPatternNone;
    // The trailing %c in the webrtc format must NOT match: a return of
    // exactly 1 means the string ended right after the layer count.
    if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
            && numLayers > 0) {
        pattern = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
    } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
                    &numLayers, &dummy, &numBLayers, &dummy))
            // tags == 1: plain "android.generic.<P>"; tags == 3 with '+'
            // separator: "android.generic.<P>+<B>" (4 conversions would mean
            // trailing junk). The overflow guard keeps numLayers + numBLayers
            // representable.
            && (tags == 1 || (tags == 3 && dummy == '+'))
            && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
        numLayers += numBLayers;
        pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
    } else {
        ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
        return BAD_VALUE;
    }

    OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams;
    InitOMXParams(&layerParams);
    layerParams.nPortIndex = kPortIndexOutput;

    status_t err = mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
            &layerParams, sizeof(layerParams));

    if (err != OK) {
        return err;
    } else if (!(layerParams.eSupportedPatterns & pattern)) {
        // Component does not support the requested layering pattern.
        return BAD_VALUE;
    }

    // Clamp the request to the component's advertised capabilities.
    numLayers = min(numLayers, layerParams.nLayerCountMax);
    numBLayers = min(numBLayers, layerParams.nBLayerCountMax);

    if (!inConfigure) {
        // Running state: only a config change is permitted.
        OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig;
        InitOMXParams(&layerConfig);
        layerConfig.nPortIndex = kPortIndexOutput;
        layerConfig.ePattern = pattern;
        layerConfig.nPLayerCountActual = numLayers - numBLayers;
        layerConfig.nBLayerCountActual = numBLayers;
        layerConfig.bBitrateRatiosSpecified = OMX_FALSE;

        err = mOMXNode->setConfig(
                (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering,
                &layerConfig, sizeof(layerConfig));
    } else {
        layerParams.ePattern = pattern;
        layerParams.nPLayerCountActual = numLayers - numBLayers;
        layerParams.nBLayerCountActual = numBLayers;
        layerParams.bBitrateRatiosSpecified = OMX_FALSE;

        err = mOMXNode->setParameter(
                (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
                &layerParams, sizeof(layerParams));
    }

    // Canonical schema string for the (possibly clamped) configuration.
    AString configSchema;
    if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) {
        configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers);
    } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) {
        configSchema = AStringPrintf("webrtc.vp8.%u", numLayers);
    }

    if (err != OK) {
        ALOGW("Failed to set temporal layers to %s (requested %s)",
                configSchema.c_str(), tsSchema.c_str());
        return err;
    }

    // Read back what the component actually accepted (log-only).
    err = mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
            &layerParams, sizeof(layerParams));

    if (err == OK) {
        ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)",
                tsSchema.c_str(), configSchema.c_str(),
                asString(layerParams.ePattern), layerParams.ePattern,
                layerParams.nPLayerCountActual, layerParams.nBLayerCountActual);

        if (outputFormat.get() == mOutputFormat.get()) {
            mOutputFormat = mOutputFormat->dup(); // trigger an output format change event
        }
        // assume we got what we configured
        outputFormat->setString("ts-schema", configSchema);
    }
    return err;
}
size; 2487 2488 err = mOMXNode->setParameter( 2489 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2490 2491 if (err != OK) { 2492 return err; 2493 } 2494 2495 err = mOMXNode->getParameter( 2496 OMX_IndexParamPortDefinition, &def, sizeof(def)); 2497 2498 if (err != OK) { 2499 return err; 2500 } 2501 2502 if (def.nBufferSize < size) { 2503 ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize); 2504 return FAILED_TRANSACTION; 2505 } 2506 2507 return OK; 2508} 2509 2510status_t ACodec::selectAudioPortFormat( 2511 OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) { 2512 OMX_AUDIO_PARAM_PORTFORMATTYPE format; 2513 InitOMXParams(&format); 2514 2515 format.nPortIndex = portIndex; 2516 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 2517 format.nIndex = index; 2518 status_t err = mOMXNode->getParameter( 2519 OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2520 2521 if (err != OK) { 2522 return err; 2523 } 2524 2525 if (format.eEncoding == desiredFormat) { 2526 break; 2527 } 2528 2529 if (index == kMaxIndicesToCheck) { 2530 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 2531 mComponentName.c_str(), index, 2532 asString(format.eEncoding), format.eEncoding); 2533 return ERROR_UNSUPPORTED; 2534 } 2535 } 2536 2537 return mOMXNode->setParameter( 2538 OMX_IndexParamAudioPortFormat, &format, sizeof(format)); 2539} 2540 2541status_t ACodec::setupAACCodec( 2542 bool encoder, int32_t numChannels, int32_t sampleRate, 2543 int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode, 2544 int32_t maxOutputChannelCount, const drcParams_t& drc, 2545 int32_t pcmLimiterEnable) { 2546 if (encoder && isADTS) { 2547 return -EINVAL; 2548 } 2549 2550 status_t err = setupRawAudioFormat( 2551 encoder ? 
// Configures the component for AAC.
//
// Encoder path: selects AAC on the output port, then fills an
// OMX_AUDIO_PARAM_AACPROFILETYPE with channel/rate/bitrate/profile and the
// SBR tool flags. Decoder path: sets stream format (ADTS vs. MP4FF) on the
// input port and forwards DRC / presentation hints; the presentation
// parameters are optional and their failure is ignored.
//
// @param encoder               true for encode, false for decode
// @param numChannels           channel count (1 => mono mode on encode)
// @param sampleRate            sample rate in Hz (also cached in mSampleRate)
// @param bitRate               encoder target bit rate (encode only)
// @param aacProfile            OMX_AUDIO_AACPROFILETYPE value (encode only)
// @param isADTS                input is ADTS-framed (decode only; invalid
//                              with encoder=true)
// @param sbrMode               -1 (codec decides) / 0 (off) / 1 (single-rate)
//                              / 2 (dual-rate); other values are rejected
// @param maxOutputChannelCount,drc,pcmLimiterEnable  decoder presentation
//                              hints; -1 fields mean "unknown"
// @return OK on success; -EINVAL for encoder+ADTS; BAD_VALUE for an
//         unsupported sbrMode; otherwise the first failing OMX error.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    // ADTS framing is only meaningful on the decode path.
    if (encoder && isADTS) {
        return -EINVAL;
    }

    // The PCM side of the codec: encoder input / decoder output.
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMXNode->getParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMXNode->setParameter(
                OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMXNode->getParameter(
                OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // SBR tool selection: SSBR = single-rate SBR, DSBR = dual-rate SBR.
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMXNode->setParameter(
                OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // Decoder path: configure the compressed (input) port.
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMXNode->getParameter(
            OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // DRC / downmix presentation hints; -1 valued fields mean "unknown".
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMXNode->setParameter(
            OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMXNode->setParameter(
                (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    mSampleRate = sampleRate;
    return res;
}
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2696 2697 if (err != OK) { 2698 return err; 2699 } 2700 2701 if (encoder) { 2702 ALOGW("AC3 encoding is not supported."); 2703 return INVALID_OPERATION; 2704 } 2705 2706 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2707 InitOMXParams(&def); 2708 def.nPortIndex = kPortIndexInput; 2709 2710 err = mOMXNode->getParameter( 2711 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, &def, sizeof(def)); 2712 2713 if (err != OK) { 2714 return err; 2715 } 2716 2717 def.nChannels = numChannels; 2718 def.nSampleRate = sampleRate; 2719 2720 return mOMXNode->setParameter( 2721 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, &def, sizeof(def)); 2722} 2723 2724status_t ACodec::setupEAC3Codec( 2725 bool encoder, int32_t numChannels, int32_t sampleRate) { 2726 status_t err = setupRawAudioFormat( 2727 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2728 2729 if (err != OK) { 2730 return err; 2731 } 2732 2733 if (encoder) { 2734 ALOGW("EAC3 encoding is not supported."); 2735 return INVALID_OPERATION; 2736 } 2737 2738 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2739 InitOMXParams(&def); 2740 def.nPortIndex = kPortIndexInput; 2741 2742 err = mOMXNode->getParameter( 2743 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, &def, sizeof(def)); 2744 2745 if (err != OK) { 2746 return err; 2747 } 2748 2749 def.nChannels = numChannels; 2750 def.nSampleRate = sampleRate; 2751 2752 return mOMXNode->setParameter( 2753 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, &def, sizeof(def)); 2754} 2755 2756static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2757 bool isAMRWB, int32_t bps) { 2758 if (isAMRWB) { 2759 if (bps <= 6600) { 2760 return OMX_AUDIO_AMRBandModeWB0; 2761 } else if (bps <= 8850) { 2762 return OMX_AUDIO_AMRBandModeWB1; 2763 } else if (bps <= 12650) { 2764 return OMX_AUDIO_AMRBandModeWB2; 2765 } else if (bps <= 14250) { 2766 return OMX_AUDIO_AMRBandModeWB3; 2767 } else if (bps <= 15850) { 2768 return OMX_AUDIO_AMRBandModeWB4; 
2769 } else if (bps <= 18250) { 2770 return OMX_AUDIO_AMRBandModeWB5; 2771 } else if (bps <= 19850) { 2772 return OMX_AUDIO_AMRBandModeWB6; 2773 } else if (bps <= 23050) { 2774 return OMX_AUDIO_AMRBandModeWB7; 2775 } 2776 2777 // 23850 bps 2778 return OMX_AUDIO_AMRBandModeWB8; 2779 } else { // AMRNB 2780 if (bps <= 4750) { 2781 return OMX_AUDIO_AMRBandModeNB0; 2782 } else if (bps <= 5150) { 2783 return OMX_AUDIO_AMRBandModeNB1; 2784 } else if (bps <= 5900) { 2785 return OMX_AUDIO_AMRBandModeNB2; 2786 } else if (bps <= 6700) { 2787 return OMX_AUDIO_AMRBandModeNB3; 2788 } else if (bps <= 7400) { 2789 return OMX_AUDIO_AMRBandModeNB4; 2790 } else if (bps <= 7950) { 2791 return OMX_AUDIO_AMRBandModeNB5; 2792 } else if (bps <= 10200) { 2793 return OMX_AUDIO_AMRBandModeNB6; 2794 } 2795 2796 // 12200 bps 2797 return OMX_AUDIO_AMRBandModeNB7; 2798 } 2799} 2800 2801status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2802 OMX_AUDIO_PARAM_AMRTYPE def; 2803 InitOMXParams(&def); 2804 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2805 2806 status_t err = mOMXNode->getParameter( 2807 OMX_IndexParamAudioAmr, &def, sizeof(def)); 2808 2809 if (err != OK) { 2810 return err; 2811 } 2812 2813 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2814 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2815 2816 err = mOMXNode->setParameter( 2817 OMX_IndexParamAudioAmr, &def, sizeof(def)); 2818 2819 if (err != OK) { 2820 return err; 2821 } 2822 2823 return setupRawAudioFormat( 2824 encoder ? kPortIndexInput : kPortIndexOutput, 2825 isWAMR ? 
16000 : 8000 /* sampleRate */, 2826 1 /* numChannels */); 2827} 2828 2829status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2830 if (encoder) { 2831 return INVALID_OPERATION; 2832 } 2833 2834 return setupRawAudioFormat( 2835 kPortIndexInput, sampleRate, numChannels); 2836} 2837 2838status_t ACodec::setupFlacCodec( 2839 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2840 2841 if (encoder) { 2842 OMX_AUDIO_PARAM_FLACTYPE def; 2843 InitOMXParams(&def); 2844 def.nPortIndex = kPortIndexOutput; 2845 2846 // configure compression level 2847 status_t err = mOMXNode->getParameter(OMX_IndexParamAudioFlac, &def, sizeof(def)); 2848 if (err != OK) { 2849 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2850 return err; 2851 } 2852 def.nCompressionLevel = compressionLevel; 2853 err = mOMXNode->setParameter(OMX_IndexParamAudioFlac, &def, sizeof(def)); 2854 if (err != OK) { 2855 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2856 return err; 2857 } 2858 } 2859 2860 return setupRawAudioFormat( 2861 encoder ? 
status_t ACodec::setupFlacCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {

    // Encoder-only: apply the FLAC compression level (already clamped to
    // [0..8] by the caller in configureCodec) on the compressed output port.
    if (encoder) {
        OMX_AUDIO_PARAM_FLACTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        // configure compression level
        status_t err = mOMXNode->getParameter(OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
        def.nCompressionLevel = compressionLevel;
        err = mOMXNode->setParameter(OMX_IndexParamAudioFlac, &def, sizeof(def));
        if (err != OK) {
            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
            return err;
        }
    }

    // PCM side: encoder input / decoder output. For a decoder, numChannels
    // and sampleRate may be 0 here (caller leaves them optional).
    return setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}
// Configures |portIndex| for linear PCM: sets the port's audio encoding to
// PCM, then fills OMX_AUDIO_PARAM_PCMMODETYPE with channel count, sample
// rate, interleaving, and the numeric format implied by |encoding|
// (8-bit unsigned / 16-bit signed / 32-bit float). If the component rejects
// a non-16-bit encoding, retries once with signed 16-bit.
// |encoding| has a default value (declared in the header) of 16-bit PCM.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMXNode->getParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMXNode->setParameter(
            OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMXNode->getParameter(
            OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    // Translate the requested encoding into OMX numeric-data/bit-depth.
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // NOTE(review): OMX_ErrorNone is 0, i.e. this reports success while
        // skipping the PCM parameter write entirely — presumably a deliberate
        // best-effort for unmappable channel counts; confirm before changing.
        return OMX_ErrorNone;
    }

    err = mOMXNode->setParameter(
            OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err != OK && encoding != kAudioEncodingPcm16bit) {
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMXNode->setParameter(
                OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Puts the decoder's output port in tunneled (sideband) mode bound to
// |audioHwSync|, then attaches the returned sideband handle to the native
// window so the display pipeline consumes frames directly.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMXNode->configureVideoTunnelMode(
            kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}
// Selects a video port format matching |compressionFormat| + |colorFormat|
// by enumerating the component's supported formats (bounded by
// kMaxIndicesToCheck). A flexible color format request is substituted with
// the component's concrete equivalent when one exists. Contains legacy
// workarounds for the TI encoder, which reports inconsistent
// compression/color fields. Returns UNKNOWN_ERROR if no entry matches.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMXNode->getParameter(
                OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            // End of the format list (or a real failure) — give up.
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && IsFlexibleColorFormat(
                        mOMXNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            // colorFormat is now the concrete format this component supports.
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        // Vendor workaround: the TI encoder fills only one of the two
        // fields reliably depending on the port, so match on that one.
        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
            && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            // Bounded enumeration: some components never terminate the list.
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    // |format| still holds the matching enumeration entry; select it.
    status_t err = mOMXNode->setParameter(
            OMX_IndexParamVideoPortFormat, &format, sizeof(format));

    return err;
}
3037// 3038// Suggested preference order: 3039// - optimal format for texture rendering (mediaplayer behavior) 3040// - optimal SW readable & texture renderable format (flex-YUV support) 3041// - optimal SW readable non-renderable format (flex-YUV bytebuffer support) 3042// - legacy "usable" standard formats 3043// 3044// For legacy support, we prefer a standard format, but will settle for a SW readable 3045// flex-YUV format. 3046status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) { 3047 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat; 3048 InitOMXParams(&format); 3049 format.nPortIndex = kPortIndexOutput; 3050 3051 InitOMXParams(&legacyFormat); 3052 // this field will change when we find a suitable legacy format 3053 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused; 3054 3055 for (OMX_U32 index = 0; ; ++index) { 3056 format.nIndex = index; 3057 status_t err = mOMXNode->getParameter( 3058 OMX_IndexParamVideoPortFormat, &format, sizeof(format)); 3059 if (err != OK) { 3060 // no more formats, pick legacy format if found 3061 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) { 3062 memcpy(&format, &legacyFormat, sizeof(format)); 3063 break; 3064 } 3065 return err; 3066 } 3067 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) { 3068 return OMX_ErrorBadParameter; 3069 } 3070 if (!getLegacyFlexibleFormat) { 3071 break; 3072 } 3073 // standard formats that were exposed to users before 3074 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar 3075 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar 3076 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar 3077 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar 3078 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) { 3079 break; 3080 } 3081 // find best legacy non-standard format 3082 OMX_U32 flexibleEquivalent; 3083 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused 3084 && IsFlexibleColorFormat( 3085 mOMXNode, 
format.eColorFormat, false /* usingNativeBuffers */, 3086 &flexibleEquivalent) 3087 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 3088 memcpy(&legacyFormat, &format, sizeof(format)); 3089 } 3090 } 3091 return mOMXNode->setParameter( 3092 OMX_IndexParamVideoPortFormat, &format, sizeof(format)); 3093} 3094 3095static const struct VideoCodingMapEntry { 3096 const char *mMime; 3097 OMX_VIDEO_CODINGTYPE mVideoCodingType; 3098} kVideoCodingMapEntry[] = { 3099 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 3100 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 3101 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 3102 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 3103 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 3104 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 3105 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 3106 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 3107}; 3108 3109static status_t GetVideoCodingTypeFromMime( 3110 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 3111 for (size_t i = 0; 3112 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3113 ++i) { 3114 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3115 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3116 return OK; 3117 } 3118 } 3119 3120 *codingType = OMX_VIDEO_CodingUnused; 3121 3122 return ERROR_UNSUPPORTED; 3123} 3124 3125static status_t GetMimeTypeForVideoCoding( 3126 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3127 for (size_t i = 0; 3128 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3129 ++i) { 3130 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3131 *mime = kVideoCodingMapEntry[i].mMime; 3132 return OK; 3133 } 3134 } 3135 3136 mime->clear(); 3137 3138 return ERROR_UNSUPPORTED; 3139} 3140 3141status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) { 3142 OMX_PARAM_PORTDEFINITIONTYPE def; 3143 InitOMXParams(&def); 3144 
def.nPortIndex = portIndex; 3145 status_t err; 3146 ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(), 3147 portIndex == kPortIndexInput ? "input" : "output", bufferNum); 3148 err = mOMXNode->getParameter( 3149 OMX_IndexParamPortDefinition, &def, sizeof(def)); 3150 if (err != OK) { 3151 return err; 3152 } 3153 def.nBufferCountActual = bufferNum; 3154 err = mOMXNode->setParameter( 3155 OMX_IndexParamPortDefinition, &def, sizeof(def)); 3156 if (err != OK) { 3157 // Component could reject this request. 3158 ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(), 3159 portIndex == kPortIndexInput ? "input" : "output", bufferNum); 3160 } 3161 return OK; 3162} 3163 3164status_t ACodec::setupVideoDecoder( 3165 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow, 3166 bool usingSwRenderer, sp<AMessage> &outputFormat) { 3167 int32_t width, height; 3168 if (!msg->findInt32("width", &width) 3169 || !msg->findInt32("height", &height)) { 3170 return INVALID_OPERATION; 3171 } 3172 3173 OMX_VIDEO_CODINGTYPE compressionFormat; 3174 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3175 3176 if (err != OK) { 3177 return err; 3178 } 3179 3180 if (compressionFormat == OMX_VIDEO_CodingVP9) { 3181 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 3182 InitOMXParams(¶ms); 3183 params.nPortIndex = kPortIndexInput; 3184 // Check if VP9 decoder advertises supported profiles. 
3185 params.nProfileIndex = 0; 3186 status_t err = mOMXNode->getParameter( 3187 OMX_IndexParamVideoProfileLevelQuerySupported, 3188 ¶ms, sizeof(params)); 3189 mIsLegacyVP9Decoder = err != OK; 3190 } 3191 3192 err = setVideoPortFormatType( 3193 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 3194 3195 if (err != OK) { 3196 return err; 3197 } 3198 3199 int32_t tmp; 3200 if (msg->findInt32("color-format", &tmp)) { 3201 OMX_COLOR_FORMATTYPE colorFormat = 3202 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3203 err = setVideoPortFormatType( 3204 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3205 if (err != OK) { 3206 ALOGW("[%s] does not support color format %d", 3207 mComponentName.c_str(), colorFormat); 3208 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3209 } 3210 } else { 3211 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3212 } 3213 3214 if (err != OK) { 3215 return err; 3216 } 3217 3218 // Set the component input buffer number to be |tmp|. If succeed, 3219 // component will set input port buffer number to be |tmp|. If fail, 3220 // component will keep the same buffer number as before. 3221 if (msg->findInt32("android._num-input-buffers", &tmp)) { 3222 err = setPortBufferNum(kPortIndexInput, tmp); 3223 if (err != OK) 3224 return err; 3225 } 3226 3227 // Set the component output buffer number to be |tmp|. If succeed, 3228 // component will set output port buffer number to be |tmp|. If fail, 3229 // component will keep the same buffer number as before. 
3230 if (msg->findInt32("android._num-output-buffers", &tmp)) { 3231 err = setPortBufferNum(kPortIndexOutput, tmp); 3232 if (err != OK) 3233 return err; 3234 } 3235 3236 int32_t frameRateInt; 3237 float frameRateFloat; 3238 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 3239 if (!msg->findInt32("frame-rate", &frameRateInt)) { 3240 frameRateInt = -1; 3241 } 3242 frameRateFloat = (float)frameRateInt; 3243 } 3244 3245 err = setVideoFormatOnPort( 3246 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 3247 3248 if (err != OK) { 3249 return err; 3250 } 3251 3252 err = setVideoFormatOnPort( 3253 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 3254 3255 if (err != OK) { 3256 return err; 3257 } 3258 3259 err = setColorAspectsForVideoDecoder( 3260 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat); 3261 if (err == ERROR_UNSUPPORTED) { // support is optional 3262 err = OK; 3263 } 3264 3265 if (err != OK) { 3266 return err; 3267 } 3268 3269 err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat); 3270 if (err == ERROR_UNSUPPORTED) { // support is optional 3271 err = OK; 3272 } 3273 return err; 3274} 3275 3276status_t ACodec::initDescribeColorAspectsIndex() { 3277 status_t err = mOMXNode->getExtensionIndex( 3278 "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex); 3279 if (err != OK) { 3280 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0; 3281 } 3282 return err; 3283} 3284 3285status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) { 3286 status_t err = ERROR_UNSUPPORTED; 3287 if (mDescribeColorAspectsIndex) { 3288 err = mOMXNode->setConfig(mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3289 } 3290 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3291 mComponentName.c_str(), 3292 params.sAspects.mRange, asString(params.sAspects.mRange), 3293 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3294 
params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3295 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3296 err, asString(err)); 3297 3298 if (verify && err == OK) { 3299 err = getCodecColorAspects(params); 3300 } 3301 3302 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3303 "[%s] setting color aspects failed even though codec advertises support", 3304 mComponentName.c_str()); 3305 return err; 3306} 3307 3308status_t ACodec::setColorAspectsForVideoDecoder( 3309 int32_t width, int32_t height, bool usingNativeWindow, 3310 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3311 DescribeColorAspectsParams params; 3312 InitOMXParams(¶ms); 3313 params.nPortIndex = kPortIndexOutput; 3314 3315 getColorAspectsFromFormat(configFormat, params.sAspects); 3316 if (usingNativeWindow) { 3317 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3318 // The default aspects will be set back to the output format during the 3319 // getFormat phase of configure(). Set non-Unspecified values back into the 3320 // format, in case component does not support this enumeration. 
3321 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3322 } 3323 3324 (void)initDescribeColorAspectsIndex(); 3325 3326 // communicate color aspects to codec 3327 return setCodecColorAspects(params); 3328} 3329 3330status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams ¶ms) { 3331 status_t err = ERROR_UNSUPPORTED; 3332 if (mDescribeColorAspectsIndex) { 3333 err = mOMXNode->getConfig(mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3334 } 3335 ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3336 mComponentName.c_str(), 3337 params.sAspects.mRange, asString(params.sAspects.mRange), 3338 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3339 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3340 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3341 err, asString(err)); 3342 if (params.bRequestingDataSpace) { 3343 ALOGV("for dataspace %#x", params.nDataSpace); 3344 } 3345 if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex 3346 && !params.bRequestingDataSpace && !params.bDataSpaceChanged) { 3347 ALOGW("[%s] getting color aspects failed even though codec advertises support", 3348 mComponentName.c_str()); 3349 } 3350 return err; 3351} 3352 3353status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) { 3354 DescribeColorAspectsParams params; 3355 InitOMXParams(¶ms); 3356 params.nPortIndex = kPortIndexInput; 3357 status_t err = getCodecColorAspects(params); 3358 if (err == OK) { 3359 // we only set encoder input aspects if codec supports them 3360 setColorAspectsIntoFormat(params.sAspects, format, true /* force */); 3361 } 3362 return err; 3363} 3364 3365status_t ACodec::getDataSpace( 3366 DescribeColorAspectsParams ¶ms, android_dataspace *dataSpace /* nonnull */, 3367 bool tryCodec) { 3368 status_t err = OK; 3369 if (tryCodec) { 3370 // request dataspace guidance from codec. 
3371 params.bRequestingDataSpace = OMX_TRUE; 3372 err = getCodecColorAspects(params); 3373 params.bRequestingDataSpace = OMX_FALSE; 3374 if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) { 3375 *dataSpace = (android_dataspace)params.nDataSpace; 3376 return err; 3377 } else if (err == ERROR_UNSUPPORTED) { 3378 // ignore not-implemented error for dataspace requests 3379 err = OK; 3380 } 3381 } 3382 3383 // this returns legacy versions if available 3384 *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */); 3385 ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 3386 "and dataspace %#x", 3387 mComponentName.c_str(), 3388 params.sAspects.mRange, asString(params.sAspects.mRange), 3389 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3390 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3391 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3392 *dataSpace); 3393 return err; 3394} 3395 3396 3397status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder( 3398 int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, 3399 android_dataspace *dataSpace) { 3400 DescribeColorAspectsParams params; 3401 InitOMXParams(¶ms); 3402 params.nPortIndex = kPortIndexOutput; 3403 3404 // reset default format and get resulting format 3405 getColorAspectsFromFormat(configFormat, params.sAspects); 3406 if (dataSpace != NULL) { 3407 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3408 } 3409 status_t err = setCodecColorAspects(params, true /* readBack */); 3410 3411 // we always set specified aspects for decoders 3412 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3413 3414 if (dataSpace != NULL) { 3415 status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */); 3416 if (err == OK) { 3417 err = res; 3418 } 3419 } 3420 3421 return err; 3422} 3423 3424// initial video encoder setup for bytebuffer mode 
3425status_t ACodec::setColorAspectsForVideoEncoder( 3426 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3427 // copy config to output format as this is not exposed via getFormat 3428 copyColorConfig(configFormat, outputFormat); 3429 3430 DescribeColorAspectsParams params; 3431 InitOMXParams(¶ms); 3432 params.nPortIndex = kPortIndexInput; 3433 getColorAspectsFromFormat(configFormat, params.sAspects); 3434 3435 (void)initDescribeColorAspectsIndex(); 3436 3437 int32_t usingRecorder; 3438 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3439 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3440 int32_t width, height; 3441 if (configFormat->findInt32("width", &width) 3442 && configFormat->findInt32("height", &height)) { 3443 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3444 status_t err = getDataSpace( 3445 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3446 if (err != OK) { 3447 return err; 3448 } 3449 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3450 } 3451 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3452 } 3453 3454 // communicate color aspects to codec, but do not allow change of the platform aspects 3455 ColorAspects origAspects = params.sAspects; 3456 for (int triesLeft = 2; --triesLeft >= 0; ) { 3457 status_t err = setCodecColorAspects(params, true /* readBack */); 3458 if (err != OK 3459 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3460 params.sAspects, origAspects, true /* usePlatformAspects */)) { 3461 return err; 3462 } 3463 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3464 mComponentName.c_str()); 3465 } 3466 return OK; 3467} 3468 3469status_t ACodec::setHDRStaticInfoForVideoCodec( 3470 OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3471 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3472 3473 
DescribeHDRStaticInfoParams params; 3474 InitOMXParams(¶ms); 3475 params.nPortIndex = portIndex; 3476 3477 HDRStaticInfo *info = ¶ms.sInfo; 3478 if (getHDRStaticInfoFromFormat(configFormat, info)) { 3479 setHDRStaticInfoIntoFormat(params.sInfo, outputFormat); 3480 } 3481 3482 (void)initDescribeHDRStaticInfoIndex(); 3483 3484 // communicate HDR static Info to codec 3485 return setHDRStaticInfo(params); 3486} 3487 3488// subsequent initial video encoder setup for surface mode 3489status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 3490 android_dataspace *dataSpace /* nonnull */) { 3491 DescribeColorAspectsParams params; 3492 InitOMXParams(¶ms); 3493 params.nPortIndex = kPortIndexInput; 3494 ColorAspects &aspects = params.sAspects; 3495 3496 // reset default format and store resulting format into both input and output formats 3497 getColorAspectsFromFormat(mConfigFormat, aspects); 3498 int32_t width, height; 3499 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3500 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3501 } 3502 setColorAspectsIntoFormat(aspects, mInputFormat); 3503 setColorAspectsIntoFormat(aspects, mOutputFormat); 3504 3505 // communicate color aspects to codec, but do not allow any change 3506 ColorAspects origAspects = aspects; 3507 status_t err = OK; 3508 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3509 status_t err = setCodecColorAspects(params, true /* readBack */); 3510 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3511 break; 3512 } 3513 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3514 mComponentName.c_str()); 3515 } 3516 3517 *dataSpace = HAL_DATASPACE_BT709; 3518 aspects = origAspects; // restore desired color aspects 3519 status_t res = getDataSpace( 3520 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3521 if (err == 
OK) { 3522 err = res; 3523 } 3524 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace); 3525 mInputFormat->setBuffer( 3526 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects))); 3527 3528 // update input format with codec supported color aspects (basically set unsupported 3529 // aspects to Unspecified) 3530 if (err == OK) { 3531 (void)getInputColorAspectsForVideoEncoder(mInputFormat); 3532 } 3533 3534 ALOGV("set default color aspects, updated input format to %s, output format to %s", 3535 mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str()); 3536 3537 return err; 3538} 3539 3540status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) { 3541 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 3542 DescribeHDRStaticInfoParams params; 3543 InitOMXParams(¶ms); 3544 params.nPortIndex = portIndex; 3545 3546 status_t err = getHDRStaticInfo(params); 3547 if (err == OK) { 3548 // we only set decodec output HDRStaticInfo if codec supports them 3549 setHDRStaticInfoIntoFormat(params.sInfo, format); 3550 } 3551 return err; 3552} 3553 3554status_t ACodec::initDescribeHDRStaticInfoIndex() { 3555 status_t err = mOMXNode->getExtensionIndex( 3556 "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex); 3557 if (err != OK) { 3558 mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0; 3559 } 3560 return err; 3561} 3562 3563status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams ¶ms) { 3564 status_t err = ERROR_UNSUPPORTED; 3565 if (mDescribeHDRStaticInfoIndex) { 3566 err = mOMXNode->setConfig(mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3567 } 3568 3569 const HDRStaticInfo *info = ¶ms.sInfo; 3570 ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, " 3571 "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)", 3572 mComponentName.c_str(), 3573 info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, 
info->sType1.mG.y, 3574 info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y, 3575 info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance, 3576 info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel); 3577 3578 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3579 "[%s] setting HDRStaticInfo failed even though codec advertises support", 3580 mComponentName.c_str()); 3581 return err; 3582} 3583 3584status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams ¶ms) { 3585 status_t err = ERROR_UNSUPPORTED; 3586 if (mDescribeHDRStaticInfoIndex) { 3587 err = mOMXNode->getConfig(mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3588 } 3589 3590 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3591 "[%s] getting HDRStaticInfo failed even though codec advertises support", 3592 mComponentName.c_str()); 3593 return err; 3594} 3595 3596status_t ACodec::setupVideoEncoder( 3597 const char *mime, const sp<AMessage> &msg, 3598 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3599 int32_t tmp; 3600 if (!msg->findInt32("color-format", &tmp)) { 3601 return INVALID_OPERATION; 3602 } 3603 3604 OMX_COLOR_FORMATTYPE colorFormat = 3605 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3606 3607 status_t err = setVideoPortFormatType( 3608 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3609 3610 if (err != OK) { 3611 ALOGE("[%s] does not support color format %d", 3612 mComponentName.c_str(), colorFormat); 3613 3614 return err; 3615 } 3616 3617 /* Input port configuration */ 3618 3619 OMX_PARAM_PORTDEFINITIONTYPE def; 3620 InitOMXParams(&def); 3621 3622 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3623 3624 def.nPortIndex = kPortIndexInput; 3625 3626 err = mOMXNode->getParameter( 3627 OMX_IndexParamPortDefinition, &def, sizeof(def)); 3628 3629 if (err != OK) { 3630 return err; 3631 } 3632 3633 int32_t width, height, bitrate; 3634 if (!msg->findInt32("width", &width) 3635 || 
!msg->findInt32("height", &height) 3636 || !msg->findInt32("bitrate", &bitrate)) { 3637 return INVALID_OPERATION; 3638 } 3639 3640 video_def->nFrameWidth = width; 3641 video_def->nFrameHeight = height; 3642 3643 int32_t stride; 3644 if (!msg->findInt32("stride", &stride)) { 3645 stride = width; 3646 } 3647 3648 video_def->nStride = stride; 3649 3650 int32_t sliceHeight; 3651 if (!msg->findInt32("slice-height", &sliceHeight)) { 3652 sliceHeight = height; 3653 } 3654 3655 video_def->nSliceHeight = sliceHeight; 3656 3657 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3658 3659 float frameRate; 3660 if (!msg->findFloat("frame-rate", &frameRate)) { 3661 int32_t tmp; 3662 if (!msg->findInt32("frame-rate", &tmp)) { 3663 return INVALID_OPERATION; 3664 } 3665 frameRate = (float)tmp; 3666 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3667 } 3668 3669 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3670 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3671 // this is redundant as it was already set up in setVideoPortFormatType 3672 // FIXME for now skip this only for flexible YUV formats 3673 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3674 video_def->eColorFormat = colorFormat; 3675 } 3676 3677 err = mOMXNode->setParameter( 3678 OMX_IndexParamPortDefinition, &def, sizeof(def)); 3679 3680 if (err != OK) { 3681 ALOGE("[%s] failed to set input port definition parameters.", 3682 mComponentName.c_str()); 3683 3684 return err; 3685 } 3686 3687 /* Output port configuration */ 3688 3689 OMX_VIDEO_CODINGTYPE compressionFormat; 3690 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3691 3692 if (err != OK) { 3693 return err; 3694 } 3695 3696 err = setVideoPortFormatType( 3697 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3698 3699 if (err != OK) { 3700 ALOGE("[%s] does not support compression format %d", 3701 mComponentName.c_str(), compressionFormat); 3702 3703 return err; 3704 } 3705 3706 
def.nPortIndex = kPortIndexOutput; 3707 3708 err = mOMXNode->getParameter( 3709 OMX_IndexParamPortDefinition, &def, sizeof(def)); 3710 3711 if (err != OK) { 3712 return err; 3713 } 3714 3715 video_def->nFrameWidth = width; 3716 video_def->nFrameHeight = height; 3717 video_def->xFramerate = 0; 3718 video_def->nBitrate = bitrate; 3719 video_def->eCompressionFormat = compressionFormat; 3720 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3721 3722 err = mOMXNode->setParameter( 3723 OMX_IndexParamPortDefinition, &def, sizeof(def)); 3724 3725 if (err != OK) { 3726 ALOGE("[%s] failed to set output port definition parameters.", 3727 mComponentName.c_str()); 3728 3729 return err; 3730 } 3731 3732 int32_t intraRefreshPeriod = 0; 3733 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3734 && intraRefreshPeriod >= 0) { 3735 err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3736 if (err != OK) { 3737 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 3738 mComponentName.c_str()); 3739 err = OK; 3740 } 3741 } 3742 3743 switch (compressionFormat) { 3744 case OMX_VIDEO_CodingMPEG4: 3745 err = setupMPEG4EncoderParameters(msg); 3746 break; 3747 3748 case OMX_VIDEO_CodingH263: 3749 err = setupH263EncoderParameters(msg); 3750 break; 3751 3752 case OMX_VIDEO_CodingAVC: 3753 err = setupAVCEncoderParameters(msg); 3754 break; 3755 3756 case OMX_VIDEO_CodingHEVC: 3757 err = setupHEVCEncoderParameters(msg); 3758 break; 3759 3760 case OMX_VIDEO_CodingVP8: 3761 case OMX_VIDEO_CodingVP9: 3762 err = setupVPXEncoderParameters(msg, outputFormat); 3763 break; 3764 3765 default: 3766 break; 3767 } 3768 3769 if (err != OK) { 3770 return err; 3771 } 3772 3773 // Set up color aspects on input, but propagate them to the output format, as they will 3774 // not be read back from encoder. 
3775 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3776 if (err == ERROR_UNSUPPORTED) { 3777 ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str()); 3778 err = OK; 3779 } 3780 3781 if (err != OK) { 3782 return err; 3783 } 3784 3785 err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat); 3786 if (err == ERROR_UNSUPPORTED) { // support is optional 3787 ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str()); 3788 err = OK; 3789 } 3790 3791 if (err != OK) { 3792 return err; 3793 } 3794 3795 switch (compressionFormat) { 3796 case OMX_VIDEO_CodingAVC: 3797 case OMX_VIDEO_CodingHEVC: 3798 err = configureTemporalLayers(msg, true /* inConfigure */, outputFormat); 3799 if (err != OK) { 3800 err = OK; // ignore failure 3801 } 3802 break; 3803 3804 case OMX_VIDEO_CodingVP8: 3805 case OMX_VIDEO_CodingVP9: 3806 // TODO: do we need to support android.generic layering? webrtc layering is 3807 // already set up in setupVPXEncoderParameters. 
3808 break; 3809 3810 default: 3811 break; 3812 } 3813 3814 if (err == OK) { 3815 ALOGI("setupVideoEncoder succeeded"); 3816 } 3817 3818 return err; 3819} 3820 3821status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3822 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3823 InitOMXParams(¶ms); 3824 params.nPortIndex = kPortIndexOutput; 3825 3826 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3827 3828 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3829 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3830 int32_t mbs; 3831 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3832 return INVALID_OPERATION; 3833 } 3834 params.nCirMBs = mbs; 3835 } 3836 3837 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3838 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3839 int32_t mbs; 3840 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3841 return INVALID_OPERATION; 3842 } 3843 params.nAirMBs = mbs; 3844 3845 int32_t ref; 3846 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3847 return INVALID_OPERATION; 3848 } 3849 params.nAirRef = ref; 3850 } 3851 3852 status_t err = mOMXNode->setParameter( 3853 OMX_IndexParamVideoIntraRefresh, ¶ms, sizeof(params)); 3854 return err; 3855} 3856 3857static OMX_U32 setPFramesSpacing( 3858 float iFramesInterval /* seconds */, int32_t frameRate, uint32_t BFramesSpacing = 0) { 3859 // BFramesSpacing is the number of B frames between I/P frames 3860 // PFramesSpacing (the value to be returned) is the number of P frames between I frames 3861 // 3862 // keyFrameInterval = ((PFramesSpacing + 1) * BFramesSpacing) + PFramesSpacing + 1 3863 // ^^^ ^^^ ^^^ 3864 // number of B frames number of P I frame 3865 // 3866 // = (PFramesSpacing + 1) * (BFramesSpacing + 1) 3867 // 3868 // E.g. 
3869 // I P I : I-interval: 8, nPFrames 1, nBFrames 3 3870 // BBB BBB 3871 3872 if (iFramesInterval < 0) { // just 1 key frame 3873 return 0xFFFFFFFE; // don't use maxint as key-frame-interval calculation will add 1 3874 } else if (iFramesInterval == 0) { // just key frames 3875 return 0; 3876 } 3877 3878 // round down as key-frame-interval is an upper limit 3879 uint32_t keyFrameInterval = uint32_t(frameRate * iFramesInterval); 3880 OMX_U32 ret = keyFrameInterval / (BFramesSpacing + 1); 3881 return ret > 0 ? ret - 1 : 0; 3882} 3883 3884static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 3885 int32_t tmp; 3886 if (!msg->findInt32("bitrate-mode", &tmp)) { 3887 return OMX_Video_ControlRateVariable; 3888 } 3889 3890 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 3891} 3892 3893status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 3894 int32_t bitrate; 3895 float iFrameInterval; 3896 if (!msg->findInt32("bitrate", &bitrate) 3897 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) { 3898 return INVALID_OPERATION; 3899 } 3900 3901 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3902 3903 float frameRate; 3904 if (!msg->findFloat("frame-rate", &frameRate)) { 3905 int32_t tmp; 3906 if (!msg->findInt32("frame-rate", &tmp)) { 3907 return INVALID_OPERATION; 3908 } 3909 frameRate = (float)tmp; 3910 } 3911 3912 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 3913 InitOMXParams(&mpeg4type); 3914 mpeg4type.nPortIndex = kPortIndexOutput; 3915 3916 status_t err = mOMXNode->getParameter( 3917 OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3918 3919 if (err != OK) { 3920 return err; 3921 } 3922 3923 mpeg4type.nSliceHeaderSpacing = 0; 3924 mpeg4type.bSVH = OMX_FALSE; 3925 mpeg4type.bGov = OMX_FALSE; 3926 3927 mpeg4type.nAllowedPictureTypes = 3928 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3929 3930 mpeg4type.nBFrames = 0; 3931 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, mpeg4type.nBFrames); 
3932 if (mpeg4type.nPFrames == 0) { 3933 mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3934 } 3935 mpeg4type.nIDCVLCThreshold = 0; 3936 mpeg4type.bACPred = OMX_TRUE; 3937 mpeg4type.nMaxPacketSize = 256; 3938 mpeg4type.nTimeIncRes = 1000; 3939 mpeg4type.nHeaderExtension = 0; 3940 mpeg4type.bReversibleVLC = OMX_FALSE; 3941 3942 int32_t profile; 3943 if (msg->findInt32("profile", &profile)) { 3944 int32_t level; 3945 if (!msg->findInt32("level", &level)) { 3946 return INVALID_OPERATION; 3947 } 3948 3949 err = verifySupportForProfileAndLevel(profile, level); 3950 3951 if (err != OK) { 3952 return err; 3953 } 3954 3955 mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile); 3956 mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level); 3957 } 3958 3959 err = mOMXNode->setParameter( 3960 OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3961 3962 if (err != OK) { 3963 return err; 3964 } 3965 3966 err = configureBitrate(bitrate, bitrateMode); 3967 3968 if (err != OK) { 3969 return err; 3970 } 3971 3972 return setupErrorCorrectionParameters(); 3973} 3974 3975status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) { 3976 int32_t bitrate; 3977 float iFrameInterval; 3978 if (!msg->findInt32("bitrate", &bitrate) 3979 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) { 3980 return INVALID_OPERATION; 3981 } 3982 3983 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3984 3985 float frameRate; 3986 if (!msg->findFloat("frame-rate", &frameRate)) { 3987 int32_t tmp; 3988 if (!msg->findInt32("frame-rate", &tmp)) { 3989 return INVALID_OPERATION; 3990 } 3991 frameRate = (float)tmp; 3992 } 3993 3994 OMX_VIDEO_PARAM_H263TYPE h263type; 3995 InitOMXParams(&h263type); 3996 h263type.nPortIndex = kPortIndexOutput; 3997 3998 status_t err = mOMXNode->getParameter( 3999 OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 4000 4001 if (err != OK) { 4002 return err; 4003 } 4004 4005 h263type.nAllowedPictureTypes = 4006 
OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4007 4008 h263type.nBFrames = 0; 4009 h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h263type.nBFrames); 4010 if (h263type.nPFrames == 0) { 4011 h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4012 } 4013 4014 int32_t profile; 4015 if (msg->findInt32("profile", &profile)) { 4016 int32_t level; 4017 if (!msg->findInt32("level", &level)) { 4018 return INVALID_OPERATION; 4019 } 4020 4021 err = verifySupportForProfileAndLevel(profile, level); 4022 4023 if (err != OK) { 4024 return err; 4025 } 4026 4027 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 4028 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 4029 } 4030 4031 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 4032 h263type.bForceRoundingTypeToZero = OMX_FALSE; 4033 h263type.nPictureHeaderRepetition = 0; 4034 h263type.nGOBHeaderInterval = 0; 4035 4036 err = mOMXNode->setParameter( 4037 OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 4038 4039 if (err != OK) { 4040 return err; 4041 } 4042 4043 err = configureBitrate(bitrate, bitrateMode); 4044 4045 if (err != OK) { 4046 return err; 4047 } 4048 4049 return setupErrorCorrectionParameters(); 4050} 4051 4052// static 4053int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor( 4054 int width, int height, int rate, int bitrate, 4055 OMX_VIDEO_AVCPROFILETYPE profile) { 4056 // convert bitrate to main/baseline profile kbps equivalent 4057 switch (profile) { 4058 case OMX_VIDEO_AVCProfileHigh10: 4059 bitrate = divUp(bitrate, 3000); break; 4060 case OMX_VIDEO_AVCProfileHigh: 4061 bitrate = divUp(bitrate, 1250); break; 4062 default: 4063 bitrate = divUp(bitrate, 1000); break; 4064 } 4065 4066 // convert size and rate to MBs 4067 width = divUp(width, 16); 4068 height = divUp(height, 16); 4069 int mbs = width * height; 4070 rate *= mbs; 4071 int maxDimension = max(width, height); 4072 4073 static const int limits[][5] = { 4074 /* MBps MB dim bitrate level */ 4075 { 
1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 }, 4076 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b }, 4077 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 }, 4078 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 }, 4079 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 }, 4080 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 }, 4081 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 }, 4082 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 }, 4083 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 }, 4084 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 4085 { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 4086 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 4087 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 4088 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 4089 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 4090 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 4091 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 4092 }; 4093 4094 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 4095 const int (&limit)[5] = limits[i]; 4096 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 4097 && bitrate <= limit[3]) { 4098 return limit[4]; 4099 } 4100 } 4101 return 0; 4102} 4103 4104status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 4105 int32_t bitrate; 4106 float iFrameInterval; 4107 if (!msg->findInt32("bitrate", &bitrate) 4108 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) { 4109 return INVALID_OPERATION; 4110 } 4111 4112 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4113 4114 float frameRate; 4115 if (!msg->findFloat("frame-rate", &frameRate)) { 4116 int32_t tmp; 4117 if (!msg->findInt32("frame-rate", &tmp)) { 4118 return INVALID_OPERATION; 4119 } 4120 frameRate = (float)tmp; 4121 } 4122 4123 status_t err = OK; 4124 int32_t intraRefreshMode = 0; 4125 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 4126 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 4127 if (err != OK) { 4128 
ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 4129 err, intraRefreshMode); 4130 return err; 4131 } 4132 } 4133 4134 OMX_VIDEO_PARAM_AVCTYPE h264type; 4135 InitOMXParams(&h264type); 4136 h264type.nPortIndex = kPortIndexOutput; 4137 4138 err = mOMXNode->getParameter( 4139 OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4140 4141 if (err != OK) { 4142 return err; 4143 } 4144 4145 h264type.nAllowedPictureTypes = 4146 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4147 4148 int32_t profile; 4149 if (msg->findInt32("profile", &profile)) { 4150 int32_t level; 4151 if (!msg->findInt32("level", &level)) { 4152 return INVALID_OPERATION; 4153 } 4154 4155 err = verifySupportForProfileAndLevel(profile, level); 4156 4157 if (err != OK) { 4158 return err; 4159 } 4160 4161 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4162 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4163 } else { 4164 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4165#if 0 /* DON'T YET DEFAULT TO HIGHEST PROFILE */ 4166 // Use largest supported profile for AVC recording if profile is not specified. 
4167 for (OMX_VIDEO_AVCPROFILETYPE profile : { 4168 OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCProfileMain }) { 4169 if (verifySupportForProfileAndLevel(profile, 0) == OK) { 4170 h264type.eProfile = profile; 4171 break; 4172 } 4173 } 4174#endif 4175 } 4176 4177 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4178 asString(h264type.eProfile), asString(h264type.eLevel)); 4179 4180 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4181 h264type.nSliceHeaderSpacing = 0; 4182 h264type.bUseHadamard = OMX_TRUE; 4183 h264type.nRefFrames = 1; 4184 h264type.nBFrames = 0; 4185 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4186 if (h264type.nPFrames == 0) { 4187 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4188 } 4189 h264type.nRefIdx10ActiveMinus1 = 0; 4190 h264type.nRefIdx11ActiveMinus1 = 0; 4191 h264type.bEntropyCodingCABAC = OMX_FALSE; 4192 h264type.bWeightedPPrediction = OMX_FALSE; 4193 h264type.bconstIpred = OMX_FALSE; 4194 h264type.bDirect8x8Inference = OMX_FALSE; 4195 h264type.bDirectSpatialTemporal = OMX_FALSE; 4196 h264type.nCabacInitIdc = 0; 4197 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4198 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4199 h264type.nSliceHeaderSpacing = 0; 4200 h264type.bUseHadamard = OMX_TRUE; 4201 h264type.nRefFrames = 2; 4202 h264type.nBFrames = 1; 4203 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4204 h264type.nAllowedPictureTypes = 4205 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4206 h264type.nRefIdx10ActiveMinus1 = 0; 4207 h264type.nRefIdx11ActiveMinus1 = 0; 4208 h264type.bEntropyCodingCABAC = OMX_TRUE; 4209 h264type.bWeightedPPrediction = OMX_TRUE; 4210 h264type.bconstIpred = OMX_TRUE; 4211 h264type.bDirect8x8Inference = OMX_TRUE; 4212 h264type.bDirectSpatialTemporal = OMX_TRUE; 4213 h264type.nCabacInitIdc = 1; 4214 } 4215 4216 if (h264type.nBFrames != 0) { 4217 
h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4218 } 4219 4220 h264type.bEnableUEP = OMX_FALSE; 4221 h264type.bEnableFMO = OMX_FALSE; 4222 h264type.bEnableASO = OMX_FALSE; 4223 h264type.bEnableRS = OMX_FALSE; 4224 h264type.bFrameMBsOnly = OMX_TRUE; 4225 h264type.bMBAFF = OMX_FALSE; 4226 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4227 4228 err = mOMXNode->setParameter( 4229 OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4230 4231 if (err != OK) { 4232 return err; 4233 } 4234 4235 // TRICKY: if we are enabling temporal layering as well, some codecs may not support layering 4236 // when B-frames are enabled. Detect this now so we can disable B frames if temporal layering 4237 // is preferred. 4238 AString tsSchema; 4239 int32_t preferBFrames = (int32_t)false; 4240 if (msg->findString("ts-schema", &tsSchema) 4241 && (!msg->findInt32("android._prefer-b-frames", &preferBFrames) || !preferBFrames)) { 4242 OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layering; 4243 InitOMXParams(&layering); 4244 layering.nPortIndex = kPortIndexOutput; 4245 if (mOMXNode->getParameter( 4246 (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering, 4247 &layering, sizeof(layering)) == OK 4248 && layering.eSupportedPatterns 4249 && layering.nBLayerCountMax == 0) { 4250 h264type.nBFrames = 0; 4251 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames); 4252 h264type.nAllowedPictureTypes &= ~OMX_VIDEO_PictureTypeB; 4253 ALOGI("disabling B-frames"); 4254 err = mOMXNode->setParameter( 4255 OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4256 4257 if (err != OK) { 4258 return err; 4259 } 4260 } 4261 } 4262 4263 return configureBitrate(bitrate, bitrateMode); 4264} 4265 4266status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4267 int32_t bitrate; 4268 float iFrameInterval; 4269 if (!msg->findInt32("bitrate", &bitrate) 4270 || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) { 4271 return INVALID_OPERATION; 
4272 } 4273 4274 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4275 4276 float frameRate; 4277 if (!msg->findFloat("frame-rate", &frameRate)) { 4278 int32_t tmp; 4279 if (!msg->findInt32("frame-rate", &tmp)) { 4280 return INVALID_OPERATION; 4281 } 4282 frameRate = (float)tmp; 4283 } 4284 4285 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4286 InitOMXParams(&hevcType); 4287 hevcType.nPortIndex = kPortIndexOutput; 4288 4289 status_t err = OK; 4290 err = mOMXNode->getParameter( 4291 (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4292 if (err != OK) { 4293 return err; 4294 } 4295 4296 int32_t profile; 4297 if (msg->findInt32("profile", &profile)) { 4298 int32_t level; 4299 if (!msg->findInt32("level", &level)) { 4300 return INVALID_OPERATION; 4301 } 4302 4303 err = verifySupportForProfileAndLevel(profile, level); 4304 if (err != OK) { 4305 return err; 4306 } 4307 4308 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4309 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4310 } 4311 // TODO: finer control? 
4312 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1; 4313 4314 err = mOMXNode->setParameter( 4315 (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4316 if (err != OK) { 4317 return err; 4318 } 4319 4320 return configureBitrate(bitrate, bitrateMode); 4321} 4322 4323status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg, sp<AMessage> &outputFormat) { 4324 int32_t bitrate; 4325 float iFrameInterval = 0; 4326 size_t tsLayers = 0; 4327 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4328 OMX_VIDEO_VPXTemporalLayerPatternNone; 4329 static const uint32_t kVp8LayerRateAlloction 4330 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4331 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4332 {100, 100, 100}, // 1 layer 4333 { 60, 100, 100}, // 2 layers {60%, 40%} 4334 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4335 }; 4336 if (!msg->findInt32("bitrate", &bitrate)) { 4337 return INVALID_OPERATION; 4338 } 4339 msg->findAsFloat("i-frame-interval", &iFrameInterval); 4340 4341 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4342 4343 float frameRate; 4344 if (!msg->findFloat("frame-rate", &frameRate)) { 4345 int32_t tmp; 4346 if (!msg->findInt32("frame-rate", &tmp)) { 4347 return INVALID_OPERATION; 4348 } 4349 frameRate = (float)tmp; 4350 } 4351 4352 AString tsSchema; 4353 OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE tsType = 4354 OMX_VIDEO_AndroidTemporalLayeringPatternNone; 4355 4356 if (msg->findString("ts-schema", &tsSchema)) { 4357 unsigned int numLayers = 0; 4358 unsigned int numBLayers = 0; 4359 int tags; 4360 char dummy; 4361 if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1 4362 && numLayers > 0) { 4363 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4364 tsType = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC; 4365 tsLayers = numLayers; 4366 } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c", 4367 &numLayers, &dummy, &numBLayers, &dummy)) 4368 && 
(tags == 1 || (tags == 3 && dummy == '+')) 4369 && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) { 4370 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4371 // VPX does not have a concept of B-frames, so just count all layers 4372 tsType = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid; 4373 tsLayers = numLayers + numBLayers; 4374 } else { 4375 ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str()); 4376 } 4377 tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS); 4378 } 4379 4380 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4381 InitOMXParams(&vp8type); 4382 vp8type.nPortIndex = kPortIndexOutput; 4383 status_t err = mOMXNode->getParameter( 4384 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4385 &vp8type, sizeof(vp8type)); 4386 4387 if (err == OK) { 4388 if (iFrameInterval > 0) { 4389 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1; 4390 } 4391 vp8type.eTemporalPattern = pattern; 4392 vp8type.nTemporalLayerCount = tsLayers; 4393 if (tsLayers > 0) { 4394 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4395 vp8type.nTemporalLayerBitrateRatio[i] = 4396 kVp8LayerRateAlloction[tsLayers - 1][i]; 4397 } 4398 } 4399 if (bitrateMode == OMX_Video_ControlRateConstant) { 4400 vp8type.nMinQuantizer = 2; 4401 vp8type.nMaxQuantizer = 63; 4402 } 4403 4404 err = mOMXNode->setParameter( 4405 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4406 &vp8type, sizeof(vp8type)); 4407 if (err != OK) { 4408 ALOGW("Extended VP8 parameters set failed: %d", err); 4409 } else if (tsType == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) { 4410 // advertise even single layer WebRTC layering, as it is defined 4411 outputFormat->setString("ts-schema", AStringPrintf("webrtc.vp8.%u-layer", tsLayers)); 4412 } else if (tsLayers > 0) { 4413 // tsType == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid 4414 outputFormat->setString("ts-schema", AStringPrintf("android.generic.%u", tsLayers)); 4415 } 4416 
} 4417 4418 return configureBitrate(bitrate, bitrateMode); 4419} 4420 4421status_t ACodec::verifySupportForProfileAndLevel( 4422 int32_t profile, int32_t level) { 4423 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4424 InitOMXParams(¶ms); 4425 params.nPortIndex = kPortIndexOutput; 4426 4427 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4428 params.nProfileIndex = index; 4429 status_t err = mOMXNode->getParameter( 4430 OMX_IndexParamVideoProfileLevelQuerySupported, 4431 ¶ms, sizeof(params)); 4432 4433 if (err != OK) { 4434 return err; 4435 } 4436 4437 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4438 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4439 4440 if (profile == supportedProfile && level <= supportedLevel) { 4441 return OK; 4442 } 4443 4444 if (index == kMaxIndicesToCheck) { 4445 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4446 mComponentName.c_str(), index, 4447 params.eProfile, params.eLevel); 4448 } 4449 } 4450 return ERROR_UNSUPPORTED; 4451} 4452 4453status_t ACodec::configureBitrate( 4454 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 4455 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 4456 InitOMXParams(&bitrateType); 4457 bitrateType.nPortIndex = kPortIndexOutput; 4458 4459 status_t err = mOMXNode->getParameter( 4460 OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType)); 4461 4462 if (err != OK) { 4463 return err; 4464 } 4465 4466 bitrateType.eControlRate = bitrateMode; 4467 bitrateType.nTargetBitrate = bitrate; 4468 4469 return mOMXNode->setParameter( 4470 OMX_IndexParamVideoBitrate, &bitrateType, sizeof(bitrateType)); 4471} 4472 4473status_t ACodec::setupErrorCorrectionParameters() { 4474 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 4475 InitOMXParams(&errorCorrectionType); 4476 errorCorrectionType.nPortIndex = kPortIndexOutput; 4477 4478 status_t err = mOMXNode->getParameter( 4479 OMX_IndexParamVideoErrorCorrection, 4480 &errorCorrectionType, 
sizeof(errorCorrectionType)); 4481 4482 if (err != OK) { 4483 return OK; // Optional feature. Ignore this failure 4484 } 4485 4486 errorCorrectionType.bEnableHEC = OMX_FALSE; 4487 errorCorrectionType.bEnableResync = OMX_TRUE; 4488 errorCorrectionType.nResynchMarkerSpacing = 256; 4489 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 4490 errorCorrectionType.bEnableRVLC = OMX_FALSE; 4491 4492 return mOMXNode->setParameter( 4493 OMX_IndexParamVideoErrorCorrection, 4494 &errorCorrectionType, sizeof(errorCorrectionType)); 4495} 4496 4497status_t ACodec::setVideoFormatOnPort( 4498 OMX_U32 portIndex, 4499 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 4500 float frameRate) { 4501 OMX_PARAM_PORTDEFINITIONTYPE def; 4502 InitOMXParams(&def); 4503 def.nPortIndex = portIndex; 4504 4505 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 4506 4507 status_t err = mOMXNode->getParameter( 4508 OMX_IndexParamPortDefinition, &def, sizeof(def)); 4509 if (err != OK) { 4510 return err; 4511 } 4512 4513 if (portIndex == kPortIndexInput) { 4514 // XXX Need a (much) better heuristic to compute input buffer sizes. 
4515 const size_t X = 64 * 1024; 4516 if (def.nBufferSize < X) { 4517 def.nBufferSize = X; 4518 } 4519 } 4520 4521 if (def.eDomain != OMX_PortDomainVideo) { 4522 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); 4523 return FAILED_TRANSACTION; 4524 } 4525 4526 video_def->nFrameWidth = width; 4527 video_def->nFrameHeight = height; 4528 4529 if (portIndex == kPortIndexInput) { 4530 video_def->eCompressionFormat = compressionFormat; 4531 video_def->eColorFormat = OMX_COLOR_FormatUnused; 4532 if (frameRate >= 0) { 4533 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 4534 } 4535 } 4536 4537 err = mOMXNode->setParameter( 4538 OMX_IndexParamPortDefinition, &def, sizeof(def)); 4539 4540 return err; 4541} 4542 4543status_t ACodec::initNativeWindow() { 4544 if (mNativeWindow != NULL) { 4545 return mOMXNode->enableNativeBuffers(kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE); 4546 } 4547 4548 mOMXNode->enableNativeBuffers(kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 4549 return OK; 4550} 4551 4552size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 4553 size_t n = 0; 4554 4555 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4556 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 4557 4558 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4559 ++n; 4560 } 4561 } 4562 4563 return n; 4564} 4565 4566size_t ACodec::countBuffersOwnedByNativeWindow() const { 4567 size_t n = 0; 4568 4569 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 4570 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 4571 4572 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4573 ++n; 4574 } 4575 } 4576 4577 return n; 4578} 4579 4580void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 4581 if (mNativeWindow == NULL) { 4582 return; 4583 } 4584 4585 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 4586 && dequeueBufferFromNativeWindow() != NULL) { 4587 // 
these buffers will be submitted as regular buffers; account for this 4588 if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) { 4589 --mMetadataBuffersToSubmit; 4590 } 4591 } 4592} 4593 4594bool ACodec::allYourBuffersAreBelongToUs( 4595 OMX_U32 portIndex) { 4596 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4597 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 4598 4599 if (info->mStatus != BufferInfo::OWNED_BY_US 4600 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4601 ALOGV("[%s] Buffer %u on port %u still has status %d", 4602 mComponentName.c_str(), 4603 info->mBufferID, portIndex, info->mStatus); 4604 return false; 4605 } 4606 } 4607 4608 return true; 4609} 4610 4611bool ACodec::allYourBuffersAreBelongToUs() { 4612 return allYourBuffersAreBelongToUs(kPortIndexInput) 4613 && allYourBuffersAreBelongToUs(kPortIndexOutput); 4614} 4615 4616void ACodec::deferMessage(const sp<AMessage> &msg) { 4617 mDeferredQueue.push_back(msg); 4618} 4619 4620void ACodec::processDeferredMessages() { 4621 List<sp<AMessage> > queue = mDeferredQueue; 4622 mDeferredQueue.clear(); 4623 4624 List<sp<AMessage> >::iterator it = queue.begin(); 4625 while (it != queue.end()) { 4626 onMessageReceived(*it++); 4627 } 4628} 4629 4630status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4631 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4632 OMX_PARAM_PORTDEFINITIONTYPE def; 4633 InitOMXParams(&def); 4634 def.nPortIndex = portIndex; 4635 4636 status_t err = mOMXNode->getParameter(OMX_IndexParamPortDefinition, &def, sizeof(def)); 4637 if (err != OK) { 4638 return err; 4639 } 4640 4641 if (def.eDir != (portIndex == kPortIndexOutput ? 
OMX_DirOutput : OMX_DirInput)) { 4642 ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex); 4643 return BAD_VALUE; 4644 } 4645 4646 switch (def.eDomain) { 4647 case OMX_PortDomainVideo: 4648 { 4649 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 4650 switch ((int)videoDef->eCompressionFormat) { 4651 case OMX_VIDEO_CodingUnused: 4652 { 4653 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 4654 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 4655 4656 notify->setInt32("stride", videoDef->nStride); 4657 notify->setInt32("slice-height", videoDef->nSliceHeight); 4658 notify->setInt32("color-format", videoDef->eColorFormat); 4659 4660 if (mNativeWindow == NULL) { 4661 DescribeColorFormat2Params describeParams; 4662 InitOMXParams(&describeParams); 4663 describeParams.eColorFormat = videoDef->eColorFormat; 4664 describeParams.nFrameWidth = videoDef->nFrameWidth; 4665 describeParams.nFrameHeight = videoDef->nFrameHeight; 4666 describeParams.nStride = videoDef->nStride; 4667 describeParams.nSliceHeight = videoDef->nSliceHeight; 4668 describeParams.bUsingNativeBuffers = OMX_FALSE; 4669 4670 if (DescribeColorFormat(mOMXNode, describeParams)) { 4671 notify->setBuffer( 4672 "image-data", 4673 ABuffer::CreateAsCopy( 4674 &describeParams.sMediaImage, 4675 sizeof(describeParams.sMediaImage))); 4676 4677 MediaImage2 &img = describeParams.sMediaImage; 4678 MediaImage2::PlaneInfo *plane = img.mPlane; 4679 ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }", 4680 mComponentName.c_str(), img.mWidth, img.mHeight, 4681 plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc, 4682 plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc, 4683 plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc); 4684 } 4685 } 4686 4687 int32_t width = (int32_t)videoDef->nFrameWidth; 4688 int32_t height = (int32_t)videoDef->nFrameHeight; 4689 4690 if (portIndex == kPortIndexOutput) { 4691 OMX_CONFIG_RECTTYPE rect; 4692 InitOMXParams(&rect); 4693 
rect.nPortIndex = portIndex; 4694 4695 if (mOMXNode->getConfig( 4696 (portIndex == kPortIndexOutput ? 4697 OMX_IndexConfigCommonOutputCrop : 4698 OMX_IndexConfigCommonInputCrop), 4699 &rect, sizeof(rect)) != OK) { 4700 rect.nLeft = 0; 4701 rect.nTop = 0; 4702 rect.nWidth = videoDef->nFrameWidth; 4703 rect.nHeight = videoDef->nFrameHeight; 4704 } 4705 4706 if (rect.nLeft < 0 || 4707 rect.nTop < 0 || 4708 rect.nLeft + rect.nWidth > videoDef->nFrameWidth || 4709 rect.nTop + rect.nHeight > videoDef->nFrameHeight) { 4710 ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)", 4711 rect.nLeft, rect.nTop, 4712 rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight, 4713 videoDef->nFrameWidth, videoDef->nFrameHeight); 4714 return BAD_VALUE; 4715 } 4716 4717 notify->setRect( 4718 "crop", 4719 rect.nLeft, 4720 rect.nTop, 4721 rect.nLeft + rect.nWidth - 1, 4722 rect.nTop + rect.nHeight - 1); 4723 4724 width = rect.nWidth; 4725 height = rect.nHeight; 4726 4727 android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN; 4728 (void)getColorAspectsAndDataSpaceForVideoDecoder( 4729 width, height, mConfigFormat, notify, 4730 mUsingNativeWindow ? 
&dataSpace : NULL); 4731 if (mUsingNativeWindow) { 4732 notify->setInt32("android._dataspace", dataSpace); 4733 } 4734 (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify); 4735 } else { 4736 (void)getInputColorAspectsForVideoEncoder(notify); 4737 if (mConfigFormat->contains("hdr-static-info")) { 4738 (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify); 4739 } 4740 } 4741 4742 break; 4743 } 4744 4745 case OMX_VIDEO_CodingVP8: 4746 case OMX_VIDEO_CodingVP9: 4747 { 4748 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4749 InitOMXParams(&vp8type); 4750 vp8type.nPortIndex = kPortIndexOutput; 4751 status_t err = mOMXNode->getParameter( 4752 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4753 &vp8type, 4754 sizeof(vp8type)); 4755 4756 if (err == OK) { 4757 if (vp8type.eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC 4758 && vp8type.nTemporalLayerCount > 0 4759 && vp8type.nTemporalLayerCount 4760 <= OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) { 4761 // advertise as android.generic if we configured for android.generic 4762 AString origSchema; 4763 if (notify->findString("ts-schema", &origSchema) 4764 && origSchema.startsWith("android.generic")) { 4765 notify->setString("ts-schema", AStringPrintf( 4766 "android.generic.%u", vp8type.nTemporalLayerCount)); 4767 } else { 4768 notify->setString("ts-schema", AStringPrintf( 4769 "webrtc.vp8.%u-layer", vp8type.nTemporalLayerCount)); 4770 } 4771 } 4772 } 4773 // Fall through to set up mime. 
4774 } 4775 4776 default: 4777 { 4778 if (mIsEncoder ^ (portIndex == kPortIndexOutput)) { 4779 // should be CodingUnused 4780 ALOGE("Raw port video compression format is %s(%d)", 4781 asString(videoDef->eCompressionFormat), 4782 videoDef->eCompressionFormat); 4783 return BAD_VALUE; 4784 } 4785 AString mime; 4786 if (GetMimeTypeForVideoCoding( 4787 videoDef->eCompressionFormat, &mime) != OK) { 4788 notify->setString("mime", "application/octet-stream"); 4789 } else { 4790 notify->setString("mime", mime.c_str()); 4791 } 4792 uint32_t intraRefreshPeriod = 0; 4793 if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK 4794 && intraRefreshPeriod > 0) { 4795 notify->setInt32("intra-refresh-period", intraRefreshPeriod); 4796 } 4797 break; 4798 } 4799 } 4800 notify->setInt32("width", videoDef->nFrameWidth); 4801 notify->setInt32("height", videoDef->nFrameHeight); 4802 ALOGV("[%s] %s format is %s", mComponentName.c_str(), 4803 portIndex == kPortIndexInput ? "input" : "output", 4804 notify->debugString().c_str()); 4805 4806 break; 4807 } 4808 4809 case OMX_PortDomainAudio: 4810 { 4811 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 4812 4813 switch ((int)audioDef->eEncoding) { 4814 case OMX_AUDIO_CodingPCM: 4815 { 4816 OMX_AUDIO_PARAM_PCMMODETYPE params; 4817 InitOMXParams(¶ms); 4818 params.nPortIndex = portIndex; 4819 4820 err = mOMXNode->getParameter( 4821 OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4822 if (err != OK) { 4823 return err; 4824 } 4825 4826 if (params.nChannels <= 0 4827 || (params.nChannels != 1 && !params.bInterleaved) 4828 || params.ePCMMode != OMX_AUDIO_PCMModeLinear) { 4829 ALOGE("unsupported PCM port: %u channels%s, %u-bit", 4830 params.nChannels, 4831 params.bInterleaved ? 
" interleaved" : "", 4832 params.nBitPerSample); 4833 return FAILED_TRANSACTION; 4834 } 4835 4836 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 4837 notify->setInt32("channel-count", params.nChannels); 4838 notify->setInt32("sample-rate", params.nSamplingRate); 4839 4840 AudioEncoding encoding = kAudioEncodingPcm16bit; 4841 if (params.eNumData == OMX_NumericalDataUnsigned 4842 && params.nBitPerSample == 8u) { 4843 encoding = kAudioEncodingPcm8bit; 4844 } else if (params.eNumData == OMX_NumericalDataFloat 4845 && params.nBitPerSample == 32u) { 4846 encoding = kAudioEncodingPcmFloat; 4847 } else if (params.nBitPerSample != 16u 4848 || params.eNumData != OMX_NumericalDataSigned) { 4849 ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ", 4850 asString(params.eNumData), params.eNumData, 4851 asString(params.ePCMMode), params.ePCMMode); 4852 return FAILED_TRANSACTION; 4853 } 4854 notify->setInt32("pcm-encoding", encoding); 4855 4856 if (mChannelMaskPresent) { 4857 notify->setInt32("channel-mask", mChannelMask); 4858 } 4859 break; 4860 } 4861 4862 case OMX_AUDIO_CodingAAC: 4863 { 4864 OMX_AUDIO_PARAM_AACPROFILETYPE params; 4865 InitOMXParams(¶ms); 4866 params.nPortIndex = portIndex; 4867 4868 err = mOMXNode->getParameter( 4869 OMX_IndexParamAudioAac, ¶ms, sizeof(params)); 4870 if (err != OK) { 4871 return err; 4872 } 4873 4874 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 4875 notify->setInt32("channel-count", params.nChannels); 4876 notify->setInt32("sample-rate", params.nSampleRate); 4877 break; 4878 } 4879 4880 case OMX_AUDIO_CodingAMR: 4881 { 4882 OMX_AUDIO_PARAM_AMRTYPE params; 4883 InitOMXParams(¶ms); 4884 params.nPortIndex = portIndex; 4885 4886 err = mOMXNode->getParameter( 4887 OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); 4888 if (err != OK) { 4889 return err; 4890 } 4891 4892 notify->setInt32("channel-count", 1); 4893 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 4894 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 4895 
notify->setInt32("sample-rate", 16000); 4896 } else { 4897 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 4898 notify->setInt32("sample-rate", 8000); 4899 } 4900 break; 4901 } 4902 4903 case OMX_AUDIO_CodingFLAC: 4904 { 4905 OMX_AUDIO_PARAM_FLACTYPE params; 4906 InitOMXParams(¶ms); 4907 params.nPortIndex = portIndex; 4908 4909 err = mOMXNode->getParameter( 4910 OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); 4911 if (err != OK) { 4912 return err; 4913 } 4914 4915 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 4916 notify->setInt32("channel-count", params.nChannels); 4917 notify->setInt32("sample-rate", params.nSampleRate); 4918 break; 4919 } 4920 4921 case OMX_AUDIO_CodingMP3: 4922 { 4923 OMX_AUDIO_PARAM_MP3TYPE params; 4924 InitOMXParams(¶ms); 4925 params.nPortIndex = portIndex; 4926 4927 err = mOMXNode->getParameter( 4928 OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); 4929 if (err != OK) { 4930 return err; 4931 } 4932 4933 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 4934 notify->setInt32("channel-count", params.nChannels); 4935 notify->setInt32("sample-rate", params.nSampleRate); 4936 break; 4937 } 4938 4939 case OMX_AUDIO_CodingVORBIS: 4940 { 4941 OMX_AUDIO_PARAM_VORBISTYPE params; 4942 InitOMXParams(¶ms); 4943 params.nPortIndex = portIndex; 4944 4945 err = mOMXNode->getParameter( 4946 OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); 4947 if (err != OK) { 4948 return err; 4949 } 4950 4951 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 4952 notify->setInt32("channel-count", params.nChannels); 4953 notify->setInt32("sample-rate", params.nSampleRate); 4954 break; 4955 } 4956 4957 case OMX_AUDIO_CodingAndroidAC3: 4958 { 4959 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 4960 InitOMXParams(¶ms); 4961 params.nPortIndex = portIndex; 4962 4963 err = mOMXNode->getParameter( 4964 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 4965 ¶ms, sizeof(params)); 4966 if (err != OK) { 4967 return err; 4968 } 4969 4970 notify->setString("mime", 
MEDIA_MIMETYPE_AUDIO_AC3); 4971 notify->setInt32("channel-count", params.nChannels); 4972 notify->setInt32("sample-rate", params.nSampleRate); 4973 break; 4974 } 4975 4976 case OMX_AUDIO_CodingAndroidEAC3: 4977 { 4978 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 4979 InitOMXParams(¶ms); 4980 params.nPortIndex = portIndex; 4981 4982 err = mOMXNode->getParameter( 4983 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 4984 ¶ms, sizeof(params)); 4985 if (err != OK) { 4986 return err; 4987 } 4988 4989 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 4990 notify->setInt32("channel-count", params.nChannels); 4991 notify->setInt32("sample-rate", params.nSampleRate); 4992 break; 4993 } 4994 4995 case OMX_AUDIO_CodingAndroidOPUS: 4996 { 4997 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 4998 InitOMXParams(¶ms); 4999 params.nPortIndex = portIndex; 5000 5001 err = mOMXNode->getParameter( 5002 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 5003 ¶ms, sizeof(params)); 5004 if (err != OK) { 5005 return err; 5006 } 5007 5008 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 5009 notify->setInt32("channel-count", params.nChannels); 5010 notify->setInt32("sample-rate", params.nSampleRate); 5011 break; 5012 } 5013 5014 case OMX_AUDIO_CodingG711: 5015 { 5016 OMX_AUDIO_PARAM_PCMMODETYPE params; 5017 InitOMXParams(¶ms); 5018 params.nPortIndex = portIndex; 5019 5020 err = mOMXNode->getParameter( 5021 (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5022 if (err != OK) { 5023 return err; 5024 } 5025 5026 const char *mime = NULL; 5027 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 5028 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 5029 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 5030 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 5031 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 5032 mime = MEDIA_MIMETYPE_AUDIO_RAW; 5033 } 5034 notify->setString("mime", mime); 5035 notify->setInt32("channel-count", params.nChannels); 5036 notify->setInt32("sample-rate", 
params.nSamplingRate); 5037 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5038 break; 5039 } 5040 5041 case OMX_AUDIO_CodingGSMFR: 5042 { 5043 OMX_AUDIO_PARAM_PCMMODETYPE params; 5044 InitOMXParams(¶ms); 5045 params.nPortIndex = portIndex; 5046 5047 err = mOMXNode->getParameter( 5048 OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5049 if (err != OK) { 5050 return err; 5051 } 5052 5053 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5054 notify->setInt32("channel-count", params.nChannels); 5055 notify->setInt32("sample-rate", params.nSamplingRate); 5056 break; 5057 } 5058 5059 default: 5060 ALOGE("Unsupported audio coding: %s(%d)\n", 5061 asString(audioDef->eEncoding), audioDef->eEncoding); 5062 return BAD_TYPE; 5063 } 5064 break; 5065 } 5066 5067 default: 5068 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5069 return BAD_TYPE; 5070 } 5071 5072 return OK; 5073} 5074 5075void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5076 // aspects are normally communicated in ColorAspects 5077 int32_t range, standard, transfer; 5078 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5079 5080 // if some aspects are unspecified, use dataspace fields 5081 if (range != 0) { 5082 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5083 } 5084 if (standard != 0) { 5085 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5086 } 5087 if (transfer != 0) { 5088 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5089 } 5090 5091 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5092 if (range != 0) { 5093 mOutputFormat->setInt32("color-range", range); 5094 } 5095 if (standard != 0) { 5096 mOutputFormat->setInt32("color-standard", standard); 5097 } 5098 if (transfer != 0) { 5099 mOutputFormat->setInt32("color-transfer", transfer); 5100 } 5101 5102 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5103 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5104 dataSpace, 5105 aspects.mRange, asString(aspects.mRange), 5106 aspects.mPrimaries, asString(aspects.mPrimaries), 5107 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5108 aspects.mTransfer, asString(aspects.mTransfer), 5109 range, asString((ColorRange)range), 5110 standard, asString((ColorStandard)standard), 5111 transfer, asString((ColorTransfer)transfer)); 5112} 5113 5114void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5115 // store new output format, at the same time mark that this is no longer the first frame 5116 mOutputFormat = mBaseOutputFormat->dup(); 5117 5118 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5119 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5120 return; 5121 } 5122 5123 if (expectedFormat != NULL) { 5124 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5125 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5126 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5127 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5128 mComponentName.c_str(), 5129 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5130 } 5131 } 5132 5133 if (!mIsVideo && !mIsEncoder) { 5134 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5135 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5136 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5137 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5138 5139 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5140 if (mConverter[kPortIndexOutput] != NULL) { 5141 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5142 } 5143 } 5144 5145 if (mTunneled) { 5146 sendFormatChange(); 5147 } 5148} 5149 5150void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
    // Attaches crop and dataspace from the current output format to a
    // render-buffer notification so they can be applied at render time.
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
        // notify renderer of the crop change and dataspace change
        // NOTE: native window uses extended right-bottom coordinate
        int32_t left, top, right, bottom;
        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
            notify->setRect("crop", left, top, right + 1, bottom + 1);
        }

        int32_t dataSpace;
        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
            notify->setInt32("dataspace", dataSpace);
        }
    }
}

// Publishes the current output format; for raw audio with encoder delay or
// padding this also (re)creates the SkipCutBuffer that trims decoder output.
void ACodec::sendFormatChange() {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) {
        int32_t channelCount, sampleRate;
        CHECK(mOutputFormat->findInt32("channel-count", &channelCount));
        CHECK(mOutputFormat->findInt32("sample-rate", &sampleRate));
        if (mSampleRate != 0 && sampleRate != 0) {
            // delay/padding are in frames; rescale if the output sample rate
            // differs from the previously known one
            mEncoderDelay = mEncoderDelay * sampleRate / mSampleRate;
            mEncoderPadding = mEncoderPadding * sampleRate / mSampleRate;
            mSampleRate = sampleRate;
        }
        if (mSkipCutBuffer != NULL) {
            size_t prevbufsize = mSkipCutBuffer->size();
            if (prevbufsize != 0) {
                // data still held in the old buffer is dropped here
                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
            }
        }
        mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
    }

    // mLastOutputFormat is not used when tunneled; doing this just to stay consistent
    mLastOutputFormat = mOutputFormat;
}

// Posts a fatal error notification to the client. When |internalError| is the
// generic UNKNOWN_ERROR, a more specific status is derived from the OMX error.
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatError);
    ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);

    if (internalError == UNKNOWN_ERROR) { // find better error code
        const status_t omxStatus = statusFromOMXError(error);
        if (omxStatus != 0) {
            internalError = omxStatus;
        } else {
            ALOGW("Invalid OMX error %#x", error);
        }
    }

    // every error signalled through here is treated as fatal
    mFatalError = true;

    notify->setInt32("err", internalError);
    notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
    notify->post();
}

// Requests an IDR (sync) frame from an encoder; not supported for decoders.
status_t ACodec::requestIDRFrame() {
    if (!mIsEncoder) {
        return ERROR_UNSUPPORTED;
    }

    OMX_CONFIG_INTRAREFRESHVOPTYPE params;
    InitOMXParams(&params);

    params.nPortIndex = kPortIndexOutput;
    params.IntraRefreshVOP = OMX_TRUE;

    return mOMXNode->setConfig(
            OMX_IndexConfigVideoIntraVOPRefresh,
            &params,
            sizeof(params));
}

////////////////////////////////////////////////////////////////////////////////

ACodec::PortDescription::PortDescription() {
}

// Records one (buffer-id, buffer) pair; the two vectors stay index-aligned.
void ACodec::PortDescription::addBuffer(
        IOMX::buffer_id id, const sp<MediaCodecBuffer> &buffer) {
    mBufferIDs.push_back(id);
    mBuffers.push_back(buffer);
}

size_t ACodec::PortDescription::countBuffers() {
    return mBufferIDs.size();
}

IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
    return mBufferIDs.itemAt(index);
}

sp<MediaCodecBuffer> ACodec::PortDescription::bufferAt(size_t index) const {
    return mBuffers.itemAt(index);
}

////////////////////////////////////////////////////////////////////////////////

ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
    : AState(parentState),
      mCodec(codec) {
}

// Default port mode: hold on to buffers. Concrete states override this.
ACodec::BaseState::PortMode ACodec::BaseState::getPortMode(
        OMX_U32 /* portIndex */) {
    return KEEP_BUFFERS;
}

// Message dispatch shared by all states; returns false for messages the
// concrete state has to handle itself.
bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
case kWhatInputBufferFilled: 5271 { 5272 onInputBufferFilled(msg); 5273 break; 5274 } 5275 5276 case kWhatOutputBufferDrained: 5277 { 5278 onOutputBufferDrained(msg); 5279 break; 5280 } 5281 5282 case ACodec::kWhatOMXMessageList: 5283 { 5284 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5285 } 5286 5287 case ACodec::kWhatOMXMessageItem: 5288 { 5289 // no need to check as we already did it for kWhatOMXMessageList 5290 return onOMXMessage(msg); 5291 } 5292 5293 case ACodec::kWhatOMXMessage: 5294 { 5295 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5296 } 5297 5298 case ACodec::kWhatSetSurface: 5299 { 5300 sp<AReplyToken> replyID; 5301 CHECK(msg->senderAwaitsResponse(&replyID)); 5302 5303 sp<RefBase> obj; 5304 CHECK(msg->findObject("surface", &obj)); 5305 5306 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5307 5308 sp<AMessage> response = new AMessage; 5309 response->setInt32("err", err); 5310 response->postReply(replyID); 5311 break; 5312 } 5313 5314 case ACodec::kWhatCreateInputSurface: 5315 case ACodec::kWhatSetInputSurface: 5316 case ACodec::kWhatSignalEndOfInputStream: 5317 { 5318 // This may result in an app illegal state exception. 5319 ALOGE("Message 0x%x was not handled", msg->what()); 5320 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5321 return true; 5322 } 5323 5324 case ACodec::kWhatOMXDied: 5325 { 5326 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 
5327 ALOGE("OMX/mediaserver died, signalling error!"); 5328 mCodec->mGraphicBufferSource.clear(); 5329 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5330 break; 5331 } 5332 5333 case ACodec::kWhatReleaseCodecInstance: 5334 { 5335 ALOGI("[%s] forcing the release of codec", 5336 mCodec->mComponentName.c_str()); 5337 status_t err = mCodec->mOMXNode->freeNode(); 5338 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5339 mCodec->mComponentName.c_str(), err); 5340 sp<AMessage> notify = mCodec->mNotify->dup(); 5341 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5342 notify->post(); 5343 break; 5344 } 5345 5346 default: 5347 return false; 5348 } 5349 5350 return true; 5351} 5352 5353bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5354 // there is a possibility that this is an outstanding message for a 5355 // codec that we have already destroyed 5356 if (mCodec->mOMXNode == NULL) { 5357 ALOGI("ignoring message as already freed component: %s", 5358 msg->debugString().c_str()); 5359 return false; 5360 } 5361 5362 int32_t generation; 5363 CHECK(msg->findInt32("generation", (int32_t*)&generation)); 5364 if (generation != mCodec->mNodeGeneration) { 5365 ALOGW("Unexpected message for component: %s, gen %u, cur %u", 5366 msg->debugString().c_str(), generation, mCodec->mNodeGeneration); 5367 return false; 5368 } 5369 return true; 5370} 5371 5372bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5373 sp<RefBase> obj; 5374 CHECK(msg->findObject("messages", &obj)); 5375 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5376 5377 bool receivedRenderedEvents = false; 5378 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5379 it != msgList->getList().cend(); ++it) { 5380 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 5381 mCodec->handleMessage(*it); 5382 int32_t type; 5383 CHECK((*it)->findInt32("type", &type)); 5384 if (type == omx_message::FRAME_RENDERED) { 5385 
            receivedRenderedEvents = true;
        }
    }

    if (receivedRenderedEvents) {
        // NOTE: all buffers are rendered in this case
        mCodec->notifyOfRenderedFrames();
    }
    return true;
}

// Decodes a single omx_message (wrapped in an AMessage) and forwards it to
// the matching onOMX* handler.
bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));

    switch (type) {
        case omx_message::EVENT:
        {
            int32_t event, data1, data2;
            CHECK(msg->findInt32("event", &event));
            CHECK(msg->findInt32("data1", &data1));
            CHECK(msg->findInt32("data2", &data2));

            if (event == OMX_EventCmdComplete
                    && data1 == OMX_CommandFlush
                    && data2 == (int32_t)OMX_ALL) {
                // Use of this notification is not consistent across
                // implementations. We'll drop this notification and rely
                // on flush-complete notifications on the individual port
                // indices instead.

                return true;
            }

            return onOMXEvent(
                    static_cast<OMX_EVENTTYPE>(event),
                    static_cast<OMX_U32>(data1),
                    static_cast<OMX_U32>(data2));
        }

        case omx_message::EMPTY_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            int32_t fenceFd;

            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXEmptyBufferDone(bufferID, fenceFd);
        }

        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));

            int32_t rangeOffset, rangeLength, flags, fenceFd;
            int64_t timeUs;

            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    fenceFd);
        }

        case omx_message::FRAME_RENDERED:
        {
            int64_t mediaTimeUs, systemNano;

            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
            CHECK(msg->findInt64("system_nano", &systemNano));

            return onOMXFrameRendered(
                    mediaTimeUs, systemNano);
        }

        default:
            ALOGE("Unexpected message type: %d", type);
            return false;
    }
}

bool ACodec::BaseState::onOMXFrameRendered(
        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
    // ignore outside of Executing and PortSettingsChanged states
    return true;
}

// Handles generic OMX events in the base state: dataspace changes are applied
// here, OMX errors are validated and signalled to the client; anything else
// is left (return false) for the concrete state to handle.
bool ACodec::BaseState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    if (event == OMX_EventDataSpaceChanged) {
        ColorAspects aspects = ColorUtils::unpackToColorAspects(data2);

        mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
        return true;
    }

    if (event != OMX_EventError) {
        ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
                mCodec->mComponentName.c_str(), event, data1, data2);

        return false;
    }

    ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);

    // verify OMX component sends back an error we expect.
    OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
    if (!isOMXError(omxError)) {
        ALOGW("Invalid OMX error %#x", omxError);
        omxError = OMX_ErrorUndefined;
    }
    mCodec->signalError(omxError);

    return true;
}

// Called when the component has emptied (consumed) an input buffer.
// Validates ownership, reclaims the buffer and, depending on port mode,
// resubmits it upstream for more input data.
bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
    ALOGV("[%s] onOMXEmptyBufferDone %u",
            mCodec->mComponentName.c_str(), bufferID);

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        if (fenceFd >= 0) {
            // don't leak the fence fd on the error path
            ::close(fenceFd);
        }
        return false;
    }
    info->mStatus = BufferInfo::OWNED_BY_US;

    // input buffers cannot take fences, so wait for any fence now
    (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
    fenceFd = -1;

    // still save fence for completeness
    info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");

    // We're in "store-metadata-in-buffers" mode, the underlying
    // OMX component had access to data that's implicitly refcounted
    // by this "MediaBuffer" object. Now that the OMX component has
    // told us that it's done with the input buffer, we can decrement
    // the mediaBuffer's reference count.
5538 info->mData->setMediaBufferBase(NULL); 5539 5540 PortMode mode = getPortMode(kPortIndexInput); 5541 5542 switch (mode) { 5543 case KEEP_BUFFERS: 5544 break; 5545 5546 case RESUBMIT_BUFFERS: 5547 postFillThisBuffer(info); 5548 break; 5549 5550 case FREE_BUFFERS: 5551 default: 5552 ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers"); 5553 return false; 5554 } 5555 5556 return true; 5557} 5558 5559void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) { 5560 if (mCodec->mPortEOS[kPortIndexInput]) { 5561 return; 5562 } 5563 5564 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 5565 5566 sp<AMessage> notify = mCodec->mNotify->dup(); 5567 notify->setInt32("what", CodecBase::kWhatFillThisBuffer); 5568 notify->setInt32("buffer-id", info->mBufferID); 5569 5570 notify->setObject("buffer", info->mData->clone(mCodec->mInputFormat)); 5571 info->mData.clear(); 5572 5573 sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec); 5574 reply->setInt32("buffer-id", info->mBufferID); 5575 5576 notify->setMessage("reply", reply); 5577 5578 notify->post(); 5579 5580 info->mStatus = BufferInfo::OWNED_BY_UPSTREAM; 5581} 5582 5583void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) { 5584 IOMX::buffer_id bufferID; 5585 CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID)); 5586 sp<MediaCodecBuffer> buffer; 5587 int32_t err = OK; 5588 bool eos = false; 5589 PortMode mode = getPortMode(kPortIndexInput); 5590 5591 sp<RefBase> obj; 5592 if (!msg->findObject("buffer", &obj)) { 5593 /* these are unfilled buffers returned by client */ 5594 CHECK(msg->findInt32("err", &err)); 5595 5596 if (err == OK) { 5597 /* buffers with no errors are returned on MediaCodec.flush */ 5598 mode = KEEP_BUFFERS; 5599 } else { 5600 ALOGV("[%s] saw error %d instead of an input buffer", 5601 mCodec->mComponentName.c_str(), err); 5602 eos = true; 5603 } 5604 } else { 5605 buffer = static_cast<MediaCodecBuffer *>(obj.get()); 5606 } 5607 5608 int32_t tmp; 5609 
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
        err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;
    info->mData = buffer;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // hold on to the buffer; just record EOS if it carried one
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                MetadataBufferType metaType = mCodec->mInputMetadataType;
                int32_t isCSD = 0;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    if (mCodec->mIsLegacyVP9Decoder) {
                        ALOGV("[%s] is legacy VP9 decoder. Ignore %u codec specific data",
                                mCodec->mComponentName.c_str(), bufferID);
                        postFillThisBuffer(info);
                        break;
                    }
                    // codec config is never passed as metadata
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                    metaType = kMetadataBufferTypeInvalid;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                size_t size = buffer->size();
                if (buffer->base() != info->mCodecData->base()) {
                    // client buffer differs from the codec buffer: convert
                    // (or plain-copy for CSD) into the codec-side buffer
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                            mCodec->mComponentName.c_str(),
                            bufferID,
                            buffer->base(), info->mCodecData->base());

                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL || isCSD) {
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                    size = info->mCodecData->size();
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                            mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                            mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                            mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");

                // for metadata-mode input, refresh the handle/graphic-buffer
                // reference stored in the metadata before submitting
                status_t err2 = OK;
                switch (metaType) {
                case kMetadataBufferTypeInvalid:
                    break;
#ifndef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                case kMetadataBufferTypeNativeHandleSource:
                    if (info->mCodecData->size() >= sizeof(VideoNativeHandleMetadata)) {
                        VideoNativeHandleMetadata *vnhmd =
                            (VideoNativeHandleMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMXNode->updateNativeHandleInMeta(
                                mCodec->kPortIndexInput,
                                NativeHandle::create(vnhmd->pHandle, false /* ownsHandle */),
                                bufferID);
                    }
                    break;
                case kMetadataBufferTypeANWBuffer:
                    if (info->mCodecData->size() >= sizeof(VideoNativeMetadata)) {
                        VideoNativeMetadata *vnmd = (VideoNativeMetadata*)info->mCodecData->base();
                        err2 = mCodec->mOMXNode->updateGraphicBufferInMeta(
                                mCodec->kPortIndexInput,
                                new GraphicBuffer(vnmd->pBuffer, false /* keepOwnership */),
                                bufferID);
                    }
                    break;
#endif
                default:
                    ALOGW("Can't marshall %s data in %zu sized buffers in %zu-bit mode",
                            asString(metaType), info->mCodecData->size(),
                            sizeof(buffer_handle_t) * 8);
                    err2 = ERROR_UNSUPPORTED;
                    break;
                }

                if (err2 == OK) {
                    err2 = mCodec->mOMXNode->emptyBuffer(
                        bufferID,
                        0,
                        size,
                        flags,
                        timeUs,
                        info->mFenceFd);
                }
                // fence ownership was transferred to emptyBuffer above
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                // Hold the reference while component is using the buffer.
                info->mData = buffer;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                            mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // no buffer payload: signal EOS with an empty buffer
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                            mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                            mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                        mCodec->mComponentName.c_str(), bufferID);

                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMXNode->emptyBuffer(
                        bufferID,
                        0,
                        0,
                        OMX_BUFFERFLAG_EOS,
                        0,
                        info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// Hands one idle input buffer (if any) to the client for filling.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            // note: keeps scanning, so the LAST idle buffer wins
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

// Called when the component has produced output in |bufferID|. Depending on
// the port mode the buffer is held, freed, or forwarded downstream (with
// format-change and EOS handling) and eventually resubmitted.
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
            mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;
    status_t err= OK;

#if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        if (fenceFd >= 0) {
            // don't leak the fence fd on the error path
            ::close(fenceFd);
        }
        return true;
    }

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus = BufferInfo::OWNED_BY_US;

    if (info->mRenderInfo != NULL) {
        // The fence for an emptied buffer must have signaled, but there still could be queued
        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
        // as we will soon requeue this buffer to the surface. While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // empty non-EOS buffers (or anything after output EOS) are
            // resubmitted to the component immediately
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                        mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMXNode->fillBuffer(info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);
            sp<MediaCodecBuffer> buffer = info->mData->clone(mCodec->mOutputFormat);

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            } else if (rangeLength > 0 && mCodec->mNativeWindow != NULL) {
                // If potentially rendering onto a surface, always save key format data (crop &
                // data space) so that we can set it if and once the buffer is rendered.
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                native_handle_t *handle = NULL;
                VideoNativeHandleMetadata &nativeMeta =
                    *(VideoNativeHandleMetadata *)buffer->data();
                if (buffer->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeNativeHandleSource) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // handle is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pHandle;
#endif
                }
                buffer->meta()->setPointer("handle", handle);
                buffer->meta()->setInt32("rangeOffset", rangeOffset);
                buffer->meta()->setInt32("rangeLength", rangeLength);
            } else if (buffer->base() == info->mCodecData->base()) {
                buffer->setRange(rangeOffset, rangeLength);
            } else {
                info->mCodecData->setRange(rangeOffset, rangeLength);
                // in this case we know that mConverter is not null
                status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                        info->mCodecData, buffer);
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                // trims encoder delay/padding from raw audio output
                mCodec->mSkipCutBuffer->submit(buffer);
            }
            buffer->meta()->setInt64("timeUs", timeUs);

            // hand the buffer downstream; ownership returns via the
            // kWhatOutputBufferDrained reply
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setObject("buffer", buffer);
            info->mData.clear();
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        case FREE_BUFFERS:
            err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return true;
            }
            break;

        default:
            ALOGE("Invalid port mode: %d", mode);
            return false;
    }

    return true;
}

// Handles an output buffer returned by the client: applies any pending
// crop/dataspace, optionally queues the buffer to the native window for
// rendering, and eventually returns it to the component.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<RefBase> obj;
    sp<MediaCodecBuffer> buffer = nullptr;
    if (msg->findObject("buffer", &obj)) {
        buffer = static_cast<MediaCodecBuffer *>(obj.get());
    }
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }
    info->mData = buffer;

    // apply crop saved in the reply message, but only if it changed
    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)
            && memcmp(&crop, &mCodec->mLastNativeWindowCrop, sizeof(crop)) != 0) {
        mCodec->mLastNativeWindowCrop = crop;
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    int32_t dataSpace;
    if (msg->findInt32("dataspace",
&dataSpace) 6070 && dataSpace != mCodec->mLastNativeWindowDataSpace) { 6071 status_t err = native_window_set_buffers_data_space( 6072 mCodec->mNativeWindow.get(), (android_dataspace)dataSpace); 6073 mCodec->mLastNativeWindowDataSpace = dataSpace; 6074 ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err); 6075 } 6076 6077 int32_t render; 6078 if (mCodec->mNativeWindow != NULL 6079 && msg->findInt32("render", &render) && render != 0 6080 && buffer != NULL && buffer->size() != 0) { 6081 ATRACE_NAME("render"); 6082 // The client wants this buffer to be rendered. 6083 6084 // save buffers sent to the surface so we can get render time when they return 6085 int64_t mediaTimeUs = -1; 6086 buffer->meta()->findInt64("timeUs", &mediaTimeUs); 6087 if (mediaTimeUs >= 0) { 6088 mCodec->mRenderTracker.onFrameQueued( 6089 mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd))); 6090 } 6091 6092 int64_t timestampNs = 0; 6093 if (!msg->findInt64("timestampNs", ×tampNs)) { 6094 // use media timestamp if client did not request a specific render timestamp 6095 if (buffer->meta()->findInt64("timeUs", ×tampNs)) { 6096 ALOGV("using buffer PTS of %lld", (long long)timestampNs); 6097 timestampNs *= 1000; 6098 } 6099 } 6100 6101 status_t err; 6102 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); 6103 ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err); 6104 6105 info->checkReadFence("onOutputBufferDrained before queueBuffer"); 6106 err = mCodec->mNativeWindow->queueBuffer( 6107 mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 6108 info->mFenceFd = -1; 6109 if (err == OK) { 6110 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 6111 } else { 6112 ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err); 6113 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6114 info->mStatus = BufferInfo::OWNED_BY_US; 6115 // keeping read fence as write fence to avoid clobbering 6116 
info->mIsReadFence = false; 6117 } 6118 } else { 6119 if (mCodec->mNativeWindow != NULL && 6120 (buffer == NULL || buffer->size() != 0)) { 6121 // move read fence into write fence to avoid clobbering 6122 info->mIsReadFence = false; 6123 ATRACE_NAME("frame-drop"); 6124 } 6125 info->mStatus = BufferInfo::OWNED_BY_US; 6126 } 6127 6128 PortMode mode = getPortMode(kPortIndexOutput); 6129 6130 switch (mode) { 6131 case KEEP_BUFFERS: 6132 { 6133 // XXX fishy, revisit!!! What about the FREE_BUFFERS case below? 6134 6135 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6136 // We cannot resubmit the buffer we just rendered, dequeue 6137 // the spare instead. 6138 6139 info = mCodec->dequeueBufferFromNativeWindow(); 6140 } 6141 break; 6142 } 6143 6144 case RESUBMIT_BUFFERS: 6145 { 6146 if (!mCodec->mPortEOS[kPortIndexOutput]) { 6147 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6148 // We cannot resubmit the buffer we just rendered, dequeue 6149 // the spare instead. 6150 6151 info = mCodec->dequeueBufferFromNativeWindow(); 6152 } 6153 6154 if (info != NULL) { 6155 ALOGV("[%s] calling fillBuffer %u", 6156 mCodec->mComponentName.c_str(), info->mBufferID); 6157 info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS"); 6158 status_t err = mCodec->mOMXNode->fillBuffer( 6159 info->mBufferID, info->mFenceFd); 6160 info->mFenceFd = -1; 6161 if (err == OK) { 6162 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6163 } else { 6164 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6165 } 6166 } 6167 } 6168 break; 6169 } 6170 6171 case FREE_BUFFERS: 6172 { 6173 status_t err = mCodec->freeBuffer(kPortIndexOutput, index); 6174 if (err != OK) { 6175 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6176 } 6177 break; 6178 } 6179 6180 default: 6181 ALOGE("Invalid port mode: %d", mode); 6182 return; 6183 } 6184} 6185 6186//////////////////////////////////////////////////////////////////////////////// 6187 
ACodec::UninitializedState::UninitializedState(ACodec *codec)
    : BaseState(codec) {
}

// Entered when no OMX node is allocated. Drops every piece of per-component
// state (death notifier, native window, converters, flags, component name)
// so a subsequent allocation starts from a clean slate.
void ACodec::UninitializedState::stateEntered() {
    ALOGV("Now uninitialized");

    if (mDeathNotifier != NULL) {
        if (mCodec->mOMXNode != NULL) {
            // Stop listening for the death of the node we are releasing.
            sp<IBinder> binder = IInterface::asBinder(mCodec->mOMXNode);
            binder->unlinkToDeath(mDeathNotifier);
        }
        mDeathNotifier.clear();
    }

    mCodec->mUsingNativeWindow = false;
    mCodec->mNativeWindow.clear();
    mCodec->mNativeWindowUsageBits = 0;
    mCodec->mOMX.clear();
    mCodec->mOMXNode.clear();
    mCodec->mFlags = 0;
    mCodec->mInputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid;
    mCodec->mConverter[0].clear();
    mCodec->mConverter[1].clear();
    mCodec->mComponentName.clear();
}

bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatSetup:
        {
            onSetup(msg);

            handled = true;
            break;
        }

        case ACodec::kWhatAllocateComponent:
        {
            onAllocateComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));
            ALOGW_IF(keepComponentAllocated,
                     "cannot keep component allocated on shutdown in Uninitialized state");

            // Nothing is allocated here; just acknowledge the shutdown.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // No buffers are in flight; report the flush as already complete.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        case ACodec::kWhatReleaseCodecInstance:
        {
            // nothing to do, as we have already signaled shutdown
            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// One-shot setup: allocate, configure and start the component in sequence,
// stopping at the first step that fails.
void ACodec::UninitializedState::onSetup(
        const sp<AMessage> &msg) {
    if (onAllocateComponent(msg)
            && mCodec->mLoadedState->onConfigureComponent(msg)) {
        mCodec->mLoadedState->onStart();
    }
}

// Connects to the OMX service and instantiates a component node, either by an
// explicit "componentName" or by matching the requested "mime" type against
// the codec list. On success transitions to the Loaded state and returns
// true; on failure signals an error to the client and returns false.
bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) {
    ALOGV("onAllocateComponent");

    CHECK(mCodec->mOMXNode == NULL);

    OMXClient client;
    if (client.connect() != OK) {
        mCodec->signalError(OMX_ErrorUndefined, NO_INIT);
        return false;
    }

    sp<IOMX> omx = client.interface();

    sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec);

    Vector<AString> matchingCodecs;

    AString mime;

    AString componentName;
    uint32_t quirks = 0;
    int32_t encoder = false;
    if (msg->findString("componentName", &componentName)) {
        // Caller named a specific component; only use it if the codec list
        // actually knows about it.
        sp<IMediaCodecList> list = MediaCodecList::getInstance();
        if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) {
            matchingCodecs.add(componentName);
        }
    } else {
        CHECK(msg->findString("mime", &mime));

        if (!msg->findInt32("encoder", &encoder)) {
            encoder = false;
        }

        MediaCodecList::findMatchingCodecs(
                mime.c_str(),
                encoder, // createEncoder
                0,       // flags
                &matchingCodecs);
    }

    sp<CodecObserver> observer = new CodecObserver;
    sp<IOMXNode> omxNode;

    // Try each candidate in order until one allocates successfully.
    status_t err = NAME_NOT_FOUND;
    for (size_t matchIndex = 0; matchIndex < matchingCodecs.size();
            ++matchIndex) {
        componentName = matchingCodecs[matchIndex];
        quirks = MediaCodecList::getQuirksFor(componentName.c_str());

        // Temporarily boost this thread's priority while the node is created.
        pid_t tid = gettid();
        int prevPriority = androidGetThreadPriority(tid);
        androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND);
        err = omx->allocateNode(componentName.c_str(), observer, &omxNode);
        androidSetThreadPriority(tid, prevPriority);

        if (err == OK) {
            break;
        } else {
            ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str());
        }

        omxNode = NULL;
    }

    if (omxNode == NULL) {
        if (!mime.empty()) {
            ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.",
                    encoder ? "en" : "de", mime.c_str(), err);
        } else {
            ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err);
        }

        mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err));
        return false;
    }

    mDeathNotifier = new DeathNotifier(notify);
    if (IInterface::asBinder(omxNode)->linkToDeath(mDeathNotifier) != OK) {
        // This was a local binder, if it dies so do we, we won't care
        // about any notifications in the afterlife.
        mDeathNotifier.clear();
    }

    notify = new AMessage(kWhatOMXMessageList, mCodec);
    // Bump the generation so stale callbacks from an old node are ignored.
    notify->setInt32("generation", ++mCodec->mNodeGeneration);
    observer->setNotificationMessage(notify);

    mCodec->mComponentName = componentName;
    mCodec->mRenderTracker.setComponentName(componentName);
    mCodec->mFlags = 0;

    if (componentName.endsWith(".secure")) {
        mCodec->mFlags |= kFlagIsSecure;
        mCodec->mFlags |= kFlagIsGrallocUsageProtected;
        mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
    }

    omxNode->setQuirks(quirks);
    mCodec->mOMX = omx;
    mCodec->mOMXNode = omxNode;

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentAllocated);
        notify->setString("componentName", mCodec->mComponentName.c_str());
        notify->post();
    }

    mCodec->changeState(mCodec->mLoadedState);

    return true;
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedState::LoadedState(ACodec *codec)
    : BaseState(codec) {
}

// Entered once the component exists but owns no buffers. Resets all
// per-session state and completes any shutdown that was requested while a
// transition to this state was in flight.
void ACodec::LoadedState::stateEntered() {
    ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str());

    mCodec->mPortEOS[kPortIndexInput] =
        mCodec->mPortEOS[kPortIndexOutput] = false;

    mCodec->mInputEOSResult = OK;

    mCodec->mDequeueCounter = 0;
    mCodec->mMetadataBuffersToSubmit = 0;
    mCodec->mRepeatFrameDelayUs = -1ll;
    mCodec->mInputFormat.clear();
    mCodec->mOutputFormat.clear();
    mCodec->mBaseOutputFormat.clear();
    mCodec->mGraphicBufferSource.clear();

    if (mCodec->mShutdownInProgress) {
        bool keepComponentAllocated = mCodec->mKeepComponentAllocated;

        mCodec->mShutdownInProgress = false;
        mCodec->mKeepComponentAllocated = false;

        onShutdown(keepComponentAllocated);
    }
    mCodec->mExplicitShutdown = false;

    mCodec->processDeferredMessages();
}

// Frees the OMX node (unless asked to keep it allocated) and acknowledges an
// explicit shutdown request to the client.
void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) {
    if (!keepComponentAllocated) {
        (void)mCodec->mOMXNode->freeNode();

        mCodec->changeState(mCodec->mUninitializedState);
    }

    if (mCodec->mExplicitShutdown) {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatShutdownCompleted);
        notify->post();
        mCodec->mExplicitShutdown = false;
    }
}

bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case ACodec::kWhatConfigureComponent:
        {
            onConfigureComponent(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatCreateInputSurface:
        {
            onCreateInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatSetInputSurface:
        {
            onSetInputSurface(msg);
            handled = true;
            break;
        }

        case ACodec::kWhatStart:
        {
            onStart();
            handled = true;
            break;
        }

        case ACodec::kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mExplicitShutdown = true;
            onShutdown(keepComponentAllocated);

            handled = true;
            break;
        }

        case ACodec::kWhatFlush:
        {
            // No buffers allocated yet; a flush is trivially complete.
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            handled = true;
            break;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }

    return handled;
}

// Configures the component for the requested mime type; on success reports
// the resulting input/output formats to the client, on failure signals an
// error and returns false.
bool ACodec::LoadedState::onConfigureComponent(
        const sp<AMessage> &msg) {
    ALOGV("onConfigureComponent");

    CHECK(mCodec->mOMXNode != NULL);

    status_t err = OK;
    AString mime;
    if (!msg->findString("mime", &mime)) {
        err = BAD_VALUE;
    } else {
        err = mCodec->configureCodec(mime.c_str(), msg);
    }
    if (err != OK) {
        ALOGE("[%s] configureCodec returning error %d",
              mCodec->mComponentName.c_str(), err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
        return false;
    }

    {
        sp<AMessage> notify = mCodec->mNotify->dup();
        notify->setInt32("what", CodecBase::kWhatComponentConfigured);
        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
        notify->post();
    }

    return true;
}

// Applies all configured input-surface options (dataspace, frame-repeat,
// max pts gap, max fps, time lapse, suspend, consumer usage, color aspects)
// to the graphic buffer source feeding the encoder. Returns the first error
// encountered, or OK.
status_t ACodec::LoadedState::setupInputSurface() {
    if (mCodec->mGraphicBufferSource == NULL) {
        return BAD_VALUE;
    }

    android_dataspace dataSpace;
    status_t err =
        mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace);
    if (err != OK) {
        ALOGE("Failed to get default data space");
        return err;
    }

    err = statusFromBinderStatus(
            mCodec->mGraphicBufferSource->configure(mCodec->mOMXNode, dataSpace));
    if (err != OK) {
        ALOGE("[%s] Unable to configure for node (err %d)",
              mCodec->mComponentName.c_str(), err);
        return err;
    }

    if (mCodec->mRepeatFrameDelayUs > 0ll) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setRepeatPreviousFrameDelayUs(
                        mCodec->mRepeatFrameDelayUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to repeat previous "
                  "frames (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mMaxPtsGapUs > 0ll) {
        OMX_PARAM_U32TYPE maxPtsGapParams;
        InitOMXParams(&maxPtsGapParams);
        maxPtsGapParams.nPortIndex = kPortIndexInput;
        maxPtsGapParams.nU32 = (uint32_t) mCodec->mMaxPtsGapUs;

        err = mCodec->mOMXNode->setParameter(
                (OMX_INDEXTYPE)OMX_IndexParamMaxFrameDurationForBitrateControl,
                &maxPtsGapParams, sizeof(maxPtsGapParams));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max timestamp gap (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mMaxFps > 0) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setMaxFps(mCodec->mMaxFps));

        if (err != OK) {
            ALOGE("[%s] Unable to configure max fps (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mTimePerCaptureUs > 0ll
            && mCodec->mTimePerFrameUs > 0ll) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setTimeLapseConfig(
                        mCodec->mTimePerFrameUs, mCodec->mTimePerCaptureUs));

        if (err != OK) {
            ALOGE("[%s] Unable to configure time lapse (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    if (mCodec->mCreateInputBuffersSuspended) {
        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setSuspend(true));

        if (err != OK) {
            ALOGE("[%s] Unable to configure option to suspend (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }

    uint32_t usageBits;
    // Best-effort: expose whether the consumer reads via software, if the
    // component reports its usage bits.
    if (mCodec->mOMXNode->getParameter(
            (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
            &usageBits, sizeof(usageBits)) == OK) {
        mCodec->mInputFormat->setInt32(
                "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
    }

    sp<ABuffer> colorAspectsBuffer;
    if (mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) {
        if (colorAspectsBuffer->size() != sizeof(ColorAspects)) {
            return INVALID_OPERATION;
        }

        err = statusFromBinderStatus(
                mCodec->mGraphicBufferSource->setColorAspects(ColorUtils::packToU32(
                        *(ColorAspects *)colorAspectsBuffer->base())));

        if (err != OK) {
            ALOGE("[%s] Unable to configure color aspects (err %d)",
                  mCodec->mComponentName.c_str(), err);
            return err;
        }
    }
    return OK;
}

// Creates a new persistent input surface for an encoder and hands its
// producer side back to the client.
void ACodec::LoadedState::onCreateInputSurface(
        const sp<AMessage> & /* msg */) {
    ALOGV("onCreateInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated);

    sp<IGraphicBufferProducer> bufferProducer;
    status_t err = mCodec->mOMX->createInputSurface(
            &bufferProducer, &mCodec->mGraphicBufferSource);

    if (err == OK) {
        err = setupInputSurface();
    }

    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;

        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);

        notify->setObject("input-surface",
                new BufferProducerWrapper(bufferProducer));
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceCreated with an error value instead.
        ALOGE("[%s] onCreateInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Attaches a previously created persistent input surface to this encoder.
void ACodec::LoadedState::onSetInputSurface(
        const sp<AMessage> &msg) {
    ALOGV("onSetInputSurface");

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted);

    sp<RefBase> obj;
    CHECK(msg->findObject("input-surface", &obj));
    sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get());
    mCodec->mGraphicBufferSource = surface->getBufferSource();

    status_t err = setupInputSurface();

    if (err == OK) {
        mCodec->mInputMetadataType = kMetadataBufferTypeANWBuffer;

        notify->setMessage("input-format", mCodec->mInputFormat);
        notify->setMessage("output-format", mCodec->mOutputFormat);
    } else {
        // Can't use mCodec->signalError() here -- MediaCodec won't forward
        // the error through because it's in the "configured" state. We
        // send a kWhatInputSurfaceAccepted with an error value instead.
        ALOGE("[%s] onSetInputSurface returning error %d",
                mCodec->mComponentName.c_str(), err);
        notify->setInt32("err", err);
    }
    notify->post();
}

// Kicks off the Loaded -> Idle transition; buffers are allocated once the
// LoadedToIdle state is entered.
void ACodec::LoadedState::onStart() {
    ALOGV("onStart");

    status_t err = mCodec->mOMXNode->sendCommand(OMX_CommandStateSet, OMX_StateIdle);
    if (err != OK) {
        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
    } else {
        mCodec->changeState(mCodec->mLoadedToIdleState);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec)
    : BaseState(codec) {
}

// Allocates buffers on both ports (required before the component can reach
// Idle). On failure, rolls the component back to Loaded.
void ACodec::LoadedToIdleState::stateEntered() {
    ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str());

    status_t err;
    if ((err = allocateBuffers()) != OK) {
        ALOGE("Failed to allocate buffers after transitioning to IDLE state "
              "(error 0x%08x)",
              err);

        mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

        mCodec->mOMXNode->sendCommand(
                OMX_CommandStateSet, OMX_StateLoaded);
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) {
            mCodec->freeBuffersOnPort(kPortIndexInput);
        }
        if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) {
            mCodec->freeBuffersOnPort(kPortIndexOutput);
        }

        mCodec->changeState(mCodec->mLoadedState);
    }
}

// Allocates input buffers first, then output buffers.
status_t ACodec::LoadedToIdleState::allocateBuffers() {
    status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput);

    if (err != OK) {
        return err;
    }

    return mCodec->allocateBuffersOnPort(kPortIndexOutput);
}

bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Defer until the transition completes.
            mCodec->deferMessage(msg);
            return true;
        }

        case
        kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Once the component reports the Idle transition complete, immediately ask it
// to move on to Executing.
bool ACodec::LoadedToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            status_t err = OK;
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                err = FAILED_TRANSACTION;
            }

            if (err == OK) {
                err = mCodec->mOMXNode->sendCommand(
                        OMX_CommandStateSet, OMX_StateExecuting);
            }

            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
            } else {
                mCodec->changeState(mCodec->mIdleToExecutingState);
            }

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::IdleToExecutingState::stateEntered() {
    ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str());
}

bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetParameters:
        case kWhatShutdown:
        {
            // Defer until the transition completes.
            mCodec->deferMessage(msg);
            return true;
        }

        case kWhatResume:
        {
            // We'll be active soon enough.
            return true;
        }

        case kWhatFlush:
        {
            // We haven't even started yet, so we're flushed alright...
            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatFlushCompleted);
            notify->post();

            return true;
        }

        case kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            return true;
        }

        default:
            return BaseState::onMessageReceived(msg);
    }
}

// Once the component reports the Executing transition complete, resume
// (submit buffers) and enter the Executing state.
bool ACodec::IdleToExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateExecuting) {
                ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->mExecutingState->resume();
            mCodec->changeState(mCodec->mExecutingState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingState::ExecutingState(ACodec *codec)
    : BaseState(codec),
      mActive(false) {
}

ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode(
        OMX_U32 /* portIndex */) {
    return RESUBMIT_BUFFERS;
}

void ACodec::ExecutingState::submitOutputMetaBuffers() {
    // submit as many buffers as there are input buffers with the codec
    // in case we are in port reconfiguring
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

        if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) {
            if (mCodec->submitOutputMetadataBuffer() != OK)
                break;
        }
    }

    // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
}

// Hands every output buffer we (or the surface) currently own back to the
// component via fillBuffer. Signals an error if any buffer is in an
// unexpected ownership state.
void ACodec::ExecutingState::submitRegularOutputBuffers() {
    bool failed = false;
    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i);

        if (mCodec->mNativeWindow != NULL) {
            if (info->mStatus != BufferInfo::OWNED_BY_US
                    && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGE("buffers should be owned by us or the surface");
                failed = true;
                break;
            }

            if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                continue;
            }
        } else {
            if (info->mStatus != BufferInfo::OWNED_BY_US) {
                ALOGE("buffers should be owned by us");
                failed = true;
                break;
            }
        }

        ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID);

        info->checkWriteFence("submitRegularOutputBuffers");
        status_t err = mCodec->mOMXNode->fillBuffer(info->mBufferID, info->mFenceFd);
        info->mFenceFd = -1;
        if (err != OK) {
            failed = true;
            break;
        }

        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
    }

    if (failed) {
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
    }
}

void ACodec::ExecutingState::submitOutputBuffers() {
    submitRegularOutputBuffers();
    if (mCodec->storingMetadataInDecodedBuffers()) {
        submitOutputMetaBuffers();
    }
}

// (Re)starts the data flow: submits all output buffers to the component and
// offers every input buffer we own to the client for filling.
void ACodec::ExecutingState::resume() {
    if (mActive) {
        ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str());
        return;
    }

    submitOutputBuffers();

    // Post all available input buffers
    if (mCodec->mBuffers[kPortIndexInput].size() == 0u) {
        ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str());
    }

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            postFillThisBuffer(info);
        }
    }

    mActive = true;
}

void ACodec::ExecutingState::stateEntered() {
    ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str());

    mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC));
    mCodec->processDeferredMessages();
}

bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            int32_t keepComponentAllocated;
            CHECK(msg->findInt32(
                        "keepComponentAllocated", &keepComponentAllocated));

            mCodec->mShutdownInProgress = true;
            mCodec->mExplicitShutdown = true;
            mCodec->mKeepComponentAllocated = keepComponentAllocated;

            mActive = false;

            status_t err = mCodec->mOMXNode->sendCommand(
                    OMX_CommandStateSet, OMX_StateIdle);
            if (err != OK) {
                if (keepComponentAllocated) {
                    mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                }
                // TODO: do some recovery here.
            } else {
                mCodec->changeState(mCodec->mExecutingToIdleState);
            }

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            ALOGV("[%s] ExecutingState flushing now "
                 "(codec owns %zu/%zu input, %zu/%zu output).",
                    mCodec->mComponentName.c_str(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexInput),
                    mCodec->mBuffers[kPortIndexInput].size(),
                    mCodec->countBuffersOwnedByComponent(kPortIndexOutput),
                    mCodec->mBuffers[kPortIndexOutput].size());

            mActive = false;

            status_t err = mCodec->mOMXNode->sendCommand(OMX_CommandFlush, OMX_ALL);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            } else {
                mCodec->changeState(mCodec->mFlushingState);
            }

            handled = true;
            break;
        }

        case kWhatResume:
        {
            resume();

            handled = true;
            break;
        }

        case kWhatRequestIDRFrame:
        {
            status_t err = mCodec->requestIDRFrame();
            if (err != OK) {
                ALOGW("Requesting an IDR frame failed.");
            }

            handled = true;
            break;
        }

        case kWhatSetParameters:
        {
            sp<AMessage> params;
            CHECK(msg->findMessage("params", &params));

            status_t err = mCodec->setParameters(params);

            // Reply with the result only if the caller asked for one.
            sp<AMessage> reply;
            if (msg->findMessage("reply", &reply)) {
                reply->setInt32("err", err);
                reply->post();
            }

            handled = true;
            break;
        }

        case ACodec::kWhatSignalEndOfInputStream:
        {
            mCodec->onSignalEndOfInputStream();
            handled = true;
            break;
        }

        // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
        case kWhatSubmitOutputMetadataBufferIfEOS:
        {
            if (mCodec->mPortEOS[kPortIndexInput] &&
                    !mCodec->mPortEOS[kPortIndexOutput]) {
                status_t err = mCodec->submitOutputMetadataBuffer();
                if (err == OK) {
                    mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround();
                }
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies runtime parameter updates ("video-bitrate", "time-offset-us",
// "skip-frames-before", "drop-input-frames", "request-sync",
// "operating-rate", "intra-refresh-period", temporal layering). Keys that
// are absent are skipped; surface-only keys fail with INVALID_OPERATION when
// no input surface is attached.
status_t ACodec::setParameters(const sp<AMessage> &params) {
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMXNode->setConfig(
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    int64_t timeOffsetUs;
    if (params->findInt64("time-offset-us", &timeOffsetUs)) {
        if (mGraphicBufferSource == NULL) {
            ALOGE("[%s] Invalid to set input buffer time offset without surface",
                    mComponentName.c_str());
            return INVALID_OPERATION;
        }

        status_t err = statusFromBinderStatus(
                mGraphicBufferSource->setTimeOffsetUs(timeOffsetUs));

        if (err != OK) {
            ALOGE("[%s] Unable to set input buffer time offset (err %d)",
                mComponentName.c_str(),
                err);
            return err;
        }
    }

    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        if (mGraphicBufferSource == NULL) {
            ALOGE("[%s] Invalid to set start time without surface",
                    mComponentName.c_str());
            return INVALID_OPERATION;
        }

        status_t err = statusFromBinderStatus(
                mGraphicBufferSource->setStartTimeUs(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        if (mGraphicBufferSource == NULL) {
            ALOGE("[%s] Invalid to set suspend without surface",
                    mComponentName.c_str());
            return INVALID_OPERATION;
        }

        status_t err = statusFromBinderStatus(
                mGraphicBufferSource->setSuspend(dropInputFrames != 0));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err = setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            // Intra refresh is a best-effort option; a failure is not fatal.
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    status_t err = configureTemporalLayers(params, false /* inConfigure */, mOutputFormat);
    if (err != OK) {
        err = OK; // ignore failure
    }

    return err;
}

// Asks the input surface (if any) to signal end-of-stream; reports the
// outcome to the client via kWhatSignaledInputEOS.
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = INVALID_OPERATION;
    if (mGraphicBufferSource != NULL) {
        err = statusFromBinderStatus(mGraphicBufferSource->signalEndOfInputStream());
    }
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Handles port-settings changes: a full output-port definition change
// triggers port disable + reconfiguration, while crop/intra-refresh-only
// changes are handled inline by onOutputFormatChanged().
bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                mCodec->mMetadataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMXNode->sendCommand(
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                     mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

7301//////////////////////////////////////////////////////////////////////////////// 7302 7303ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState( 7304 ACodec *codec) 7305 : BaseState(codec) { 7306} 7307 7308ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode( 7309 OMX_U32 portIndex) { 7310 if (portIndex == kPortIndexOutput) { 7311 return FREE_BUFFERS; 7312 } 7313 7314 CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput); 7315 7316 return RESUBMIT_BUFFERS; 7317} 7318 7319bool ACodec::OutputPortSettingsChangedState::onMessageReceived( 7320 const sp<AMessage> &msg) { 7321 bool handled = false; 7322 7323 switch (msg->what()) { 7324 case kWhatFlush: 7325 case kWhatShutdown: 7326 case kWhatResume: 7327 case kWhatSetParameters: 7328 { 7329 if (msg->what() == kWhatResume) { 7330 ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str()); 7331 } 7332 7333 mCodec->deferMessage(msg); 7334 handled = true; 7335 break; 7336 } 7337 7338 default: 7339 handled = BaseState::onMessageReceived(msg); 7340 break; 7341 } 7342 7343 return handled; 7344} 7345 7346void ACodec::OutputPortSettingsChangedState::stateEntered() { 7347 ALOGV("[%s] Now handling output port settings change", 7348 mCodec->mComponentName.c_str()); 7349} 7350 7351bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered( 7352 int64_t mediaTimeUs, nsecs_t systemNano) { 7353 mCodec->onFrameRendered(mediaTimeUs, systemNano); 7354 return true; 7355} 7356 7357bool ACodec::OutputPortSettingsChangedState::onOMXEvent( 7358 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7359 switch (event) { 7360 case OMX_EventCmdComplete: 7361 { 7362 if (data1 == (OMX_U32)OMX_CommandPortDisable) { 7363 if (data2 != (OMX_U32)kPortIndexOutput) { 7364 ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2); 7365 return false; 7366 } 7367 7368 ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str()); 7369 7370 status_t err = OK; 7371 if 
(!mCodec->mBuffers[kPortIndexOutput].isEmpty()) { 7372 ALOGE("disabled port should be empty, but has %zu buffers", 7373 mCodec->mBuffers[kPortIndexOutput].size()); 7374 err = FAILED_TRANSACTION; 7375 } else { 7376 mCodec->mDealer[kPortIndexOutput].clear(); 7377 } 7378 7379 if (err == OK) { 7380 err = mCodec->mOMXNode->sendCommand( 7381 OMX_CommandPortEnable, kPortIndexOutput); 7382 } 7383 7384 if (err == OK) { 7385 err = mCodec->allocateBuffersOnPort(kPortIndexOutput); 7386 ALOGE_IF(err != OK, "Failed to allocate output port buffers after port " 7387 "reconfiguration: (%d)", err); 7388 } 7389 7390 if (err != OK) { 7391 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 7392 7393 // This is technically not correct, but appears to be 7394 // the only way to free the component instance. 7395 // Controlled transitioning from excecuting->idle 7396 // and idle->loaded seem impossible probably because 7397 // the output port never finishes re-enabling. 7398 mCodec->mShutdownInProgress = true; 7399 mCodec->mKeepComponentAllocated = false; 7400 mCodec->changeState(mCodec->mLoadedState); 7401 } 7402 7403 return true; 7404 } else if (data1 == (OMX_U32)OMX_CommandPortEnable) { 7405 if (data2 != (OMX_U32)kPortIndexOutput) { 7406 ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2); 7407 return false; 7408 } 7409 7410 ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str()); 7411 7412 if (mCodec->mExecutingState->active()) { 7413 mCodec->mExecutingState->submitOutputBuffers(); 7414 } 7415 7416 mCodec->changeState(mCodec->mExecutingState); 7417 7418 return true; 7419 } 7420 7421 return false; 7422 } 7423 7424 default: 7425 return false; 7426 } 7427} 7428 7429//////////////////////////////////////////////////////////////////////////////// 7430 7431ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec) 7432 : BaseState(codec), 7433 mComponentNowIdle(false) { 7434} 7435 7436bool 
ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) { 7437 bool handled = false; 7438 7439 switch (msg->what()) { 7440 case kWhatFlush: 7441 { 7442 // Don't send me a flush request if you previously wanted me 7443 // to shutdown. 7444 ALOGW("Ignoring flush request in ExecutingToIdleState"); 7445 break; 7446 } 7447 7448 case kWhatShutdown: 7449 { 7450 // We're already doing that... 7451 7452 handled = true; 7453 break; 7454 } 7455 7456 default: 7457 handled = BaseState::onMessageReceived(msg); 7458 break; 7459 } 7460 7461 return handled; 7462} 7463 7464void ACodec::ExecutingToIdleState::stateEntered() { 7465 ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str()); 7466 7467 mComponentNowIdle = false; 7468 mCodec->mLastOutputFormat.clear(); 7469} 7470 7471bool ACodec::ExecutingToIdleState::onOMXEvent( 7472 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7473 switch (event) { 7474 case OMX_EventCmdComplete: 7475 { 7476 if (data1 != (OMX_U32)OMX_CommandStateSet 7477 || data2 != (OMX_U32)OMX_StateIdle) { 7478 ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)", 7479 asString((OMX_COMMANDTYPE)data1), data1, 7480 asString((OMX_STATETYPE)data2), data2); 7481 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7482 return true; 7483 } 7484 7485 mComponentNowIdle = true; 7486 7487 changeStateIfWeOwnAllBuffers(); 7488 7489 return true; 7490 } 7491 7492 case OMX_EventPortSettingsChanged: 7493 case OMX_EventBufferFlag: 7494 { 7495 // We're shutting down and don't care about this anymore. 
7496 return true; 7497 } 7498 7499 default: 7500 return BaseState::onOMXEvent(event, data1, data2); 7501 } 7502} 7503 7504void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() { 7505 if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) { 7506 status_t err = mCodec->mOMXNode->sendCommand( 7507 OMX_CommandStateSet, OMX_StateLoaded); 7508 if (err == OK) { 7509 err = mCodec->freeBuffersOnPort(kPortIndexInput); 7510 status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput); 7511 if (err == OK) { 7512 err = err2; 7513 } 7514 } 7515 7516 if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) 7517 && mCodec->mNativeWindow != NULL) { 7518 // We push enough 1x1 blank buffers to ensure that one of 7519 // them has made it to the display. This allows the OMX 7520 // component teardown to zero out any protected buffers 7521 // without the risk of scanning out one of those buffers. 7522 pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get()); 7523 } 7524 7525 if (err != OK) { 7526 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7527 return; 7528 } 7529 7530 mCodec->changeState(mCodec->mIdleToLoadedState); 7531 } 7532} 7533 7534void ACodec::ExecutingToIdleState::onInputBufferFilled( 7535 const sp<AMessage> &msg) { 7536 BaseState::onInputBufferFilled(msg); 7537 7538 changeStateIfWeOwnAllBuffers(); 7539} 7540 7541void ACodec::ExecutingToIdleState::onOutputBufferDrained( 7542 const sp<AMessage> &msg) { 7543 BaseState::onOutputBufferDrained(msg); 7544 7545 changeStateIfWeOwnAllBuffers(); 7546} 7547 7548//////////////////////////////////////////////////////////////////////////////// 7549 7550ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec) 7551 : BaseState(codec) { 7552} 7553 7554bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) { 7555 bool handled = false; 7556 7557 switch (msg->what()) { 7558 case kWhatShutdown: 7559 { 7560 // We're already doing that... 
7561 7562 handled = true; 7563 break; 7564 } 7565 7566 case kWhatFlush: 7567 { 7568 // Don't send me a flush request if you previously wanted me 7569 // to shutdown. 7570 ALOGE("Got flush request in IdleToLoadedState"); 7571 break; 7572 } 7573 7574 default: 7575 handled = BaseState::onMessageReceived(msg); 7576 break; 7577 } 7578 7579 return handled; 7580} 7581 7582void ACodec::IdleToLoadedState::stateEntered() { 7583 ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str()); 7584} 7585 7586bool ACodec::IdleToLoadedState::onOMXEvent( 7587 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7588 switch (event) { 7589 case OMX_EventCmdComplete: 7590 { 7591 if (data1 != (OMX_U32)OMX_CommandStateSet 7592 || data2 != (OMX_U32)OMX_StateLoaded) { 7593 ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)", 7594 asString((OMX_COMMANDTYPE)data1), data1, 7595 asString((OMX_STATETYPE)data2), data2); 7596 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7597 return true; 7598 } 7599 7600 mCodec->changeState(mCodec->mLoadedState); 7601 7602 return true; 7603 } 7604 7605 default: 7606 return BaseState::onOMXEvent(event, data1, data2); 7607 } 7608} 7609 7610//////////////////////////////////////////////////////////////////////////////// 7611 7612ACodec::FlushingState::FlushingState(ACodec *codec) 7613 : BaseState(codec) { 7614} 7615 7616void ACodec::FlushingState::stateEntered() { 7617 ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str()); 7618 7619 mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false; 7620} 7621 7622bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) { 7623 bool handled = false; 7624 7625 switch (msg->what()) { 7626 case kWhatShutdown: 7627 { 7628 mCodec->deferMessage(msg); 7629 break; 7630 } 7631 7632 case kWhatFlush: 7633 { 7634 // We're already doing this right now. 
7635 handled = true; 7636 break; 7637 } 7638 7639 default: 7640 handled = BaseState::onMessageReceived(msg); 7641 break; 7642 } 7643 7644 return handled; 7645} 7646 7647bool ACodec::FlushingState::onOMXEvent( 7648 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7649 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7650 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7651 7652 switch (event) { 7653 case OMX_EventCmdComplete: 7654 { 7655 if (data1 != (OMX_U32)OMX_CommandFlush) { 7656 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7657 asString((OMX_COMMANDTYPE)data1), data1, data2); 7658 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7659 return true; 7660 } 7661 7662 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7663 if (mFlushComplete[data2]) { 7664 ALOGW("Flush already completed for %s port", 7665 data2 == kPortIndexInput ? "input" : "output"); 7666 return true; 7667 } 7668 mFlushComplete[data2] = true; 7669 7670 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7671 changeStateIfWeOwnAllBuffers(); 7672 } 7673 } else if (data2 == OMX_ALL) { 7674 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7675 ALOGW("received flush complete event for OMX_ALL before ports have been" 7676 "flushed (%d/%d)", 7677 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7678 return false; 7679 } 7680 7681 changeStateIfWeOwnAllBuffers(); 7682 } else { 7683 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7684 } 7685 7686 return true; 7687 } 7688 7689 case OMX_EventPortSettingsChanged: 7690 { 7691 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7692 msg->setInt32("type", omx_message::EVENT); 7693 msg->setInt32("generation", mCodec->mNodeGeneration); 7694 msg->setInt32("event", event); 7695 msg->setInt32("data1", data1); 7696 msg->setInt32("data2", data2); 7697 7698 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7699 
mCodec->mComponentName.c_str()); 7700 7701 mCodec->deferMessage(msg); 7702 7703 return true; 7704 } 7705 7706 default: 7707 return BaseState::onOMXEvent(event, data1, data2); 7708 } 7709 7710 return true; 7711} 7712 7713void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7714 BaseState::onOutputBufferDrained(msg); 7715 7716 changeStateIfWeOwnAllBuffers(); 7717} 7718 7719void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7720 BaseState::onInputBufferFilled(msg); 7721 7722 changeStateIfWeOwnAllBuffers(); 7723} 7724 7725void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7726 if (mFlushComplete[kPortIndexInput] 7727 && mFlushComplete[kPortIndexOutput] 7728 && mCodec->allYourBuffersAreBelongToUs()) { 7729 // We now own all buffers except possibly those still queued with 7730 // the native window for rendering. Let's get those back as well. 7731 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7732 7733 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7734 7735 sp<AMessage> notify = mCodec->mNotify->dup(); 7736 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7737 notify->post(); 7738 7739 mCodec->mPortEOS[kPortIndexInput] = 7740 mCodec->mPortEOS[kPortIndexOutput] = false; 7741 7742 mCodec->mInputEOSResult = OK; 7743 7744 if (mCodec->mSkipCutBuffer != NULL) { 7745 mCodec->mSkipCutBuffer->clear(); 7746 } 7747 7748 mCodec->changeState(mCodec->mExecutingState); 7749 } 7750} 7751 7752status_t ACodec::queryCapabilities( 7753 const AString &name, const AString &mime, bool isEncoder, 7754 sp<MediaCodecInfo::Capabilities> *caps) { 7755 (*caps).clear(); 7756 const char *role = GetComponentRole(isEncoder, mime.c_str()); 7757 if (role == NULL) { 7758 return BAD_VALUE; 7759 } 7760 7761 OMXClient client; 7762 status_t err = client.connect(); 7763 if (err != OK) { 7764 return err; 7765 } 7766 7767 sp<IOMX> omx = client.interface(); 7768 sp<CodecObserver> observer = new CodecObserver; 7769 
sp<IOMXNode> omxNode; 7770 7771 err = omx->allocateNode(name.c_str(), observer, &omxNode); 7772 if (err != OK) { 7773 client.disconnect(); 7774 return err; 7775 } 7776 7777 err = SetComponentRole(omxNode, role); 7778 if (err != OK) { 7779 omxNode->freeNode(); 7780 client.disconnect(); 7781 return err; 7782 } 7783 7784 sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder(); 7785 bool isVideo = mime.startsWithIgnoreCase("video/"); 7786 7787 if (isVideo) { 7788 OMX_VIDEO_PARAM_PROFILELEVELTYPE param; 7789 InitOMXParams(¶m); 7790 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7791 7792 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7793 param.nProfileIndex = index; 7794 status_t err = omxNode->getParameter( 7795 OMX_IndexParamVideoProfileLevelQuerySupported, 7796 ¶m, sizeof(param)); 7797 if (err != OK) { 7798 break; 7799 } 7800 builder->addProfileLevel(param.eProfile, param.eLevel); 7801 7802 if (index == kMaxIndicesToCheck) { 7803 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 7804 name.c_str(), index, 7805 param.eProfile, param.eLevel); 7806 } 7807 } 7808 7809 // Color format query 7810 // return colors in the order reported by the OMX component 7811 // prefix "flexible" standard ones with the flexible equivalent 7812 OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat; 7813 InitOMXParams(&portFormat); 7814 portFormat.nPortIndex = isEncoder ? 
kPortIndexInput : kPortIndexOutput; 7815 Vector<uint32_t> supportedColors; // shadow copy to check for duplicates 7816 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7817 portFormat.nIndex = index; 7818 status_t err = omxNode->getParameter( 7819 OMX_IndexParamVideoPortFormat, 7820 &portFormat, sizeof(portFormat)); 7821 if (err != OK) { 7822 break; 7823 } 7824 7825 OMX_U32 flexibleEquivalent; 7826 if (IsFlexibleColorFormat( 7827 omxNode, portFormat.eColorFormat, false /* usingNativeWindow */, 7828 &flexibleEquivalent)) { 7829 bool marked = false; 7830 for (size_t i = 0; i < supportedColors.size(); ++i) { 7831 if (supportedColors[i] == flexibleEquivalent) { 7832 marked = true; 7833 break; 7834 } 7835 } 7836 if (!marked) { 7837 supportedColors.push(flexibleEquivalent); 7838 builder->addColorFormat(flexibleEquivalent); 7839 } 7840 } 7841 supportedColors.push(portFormat.eColorFormat); 7842 builder->addColorFormat(portFormat.eColorFormat); 7843 7844 if (index == kMaxIndicesToCheck) { 7845 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 7846 name.c_str(), index, 7847 asString(portFormat.eColorFormat), portFormat.eColorFormat); 7848 } 7849 } 7850 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) { 7851 // More audio codecs if they have profiles. 7852 OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param; 7853 InitOMXParams(¶m); 7854 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7855 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7856 param.nProfileIndex = index; 7857 status_t err = omxNode->getParameter( 7858 (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported, 7859 ¶m, sizeof(param)); 7860 if (err != OK) { 7861 break; 7862 } 7863 // For audio, level is ignored. 
7864 builder->addProfileLevel(param.eProfile, 0 /* level */); 7865 7866 if (index == kMaxIndicesToCheck) { 7867 ALOGW("[%s] stopping checking profiles after %u: %x", 7868 name.c_str(), index, 7869 param.eProfile); 7870 } 7871 } 7872 7873 // NOTE: Without Android extensions, OMX does not provide a way to query 7874 // AAC profile support 7875 if (param.nProfileIndex == 0) { 7876 ALOGW("component %s doesn't support profile query.", name.c_str()); 7877 } 7878 } 7879 7880 if (isVideo && !isEncoder) { 7881 native_handle_t *sidebandHandle = NULL; 7882 if (omxNode->configureVideoTunnelMode( 7883 kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) { 7884 // tunneled playback includes adaptive playback 7885 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback 7886 | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback); 7887 } else if (omxNode->storeMetaDataInBuffers( 7888 kPortIndexOutput, OMX_TRUE) == OK || 7889 omxNode->prepareForAdaptivePlayback( 7890 kPortIndexOutput, OMX_TRUE, 7891 1280 /* width */, 720 /* height */) == OK) { 7892 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback); 7893 } 7894 } 7895 7896 if (isVideo && isEncoder) { 7897 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 7898 InitOMXParams(¶ms); 7899 params.nPortIndex = kPortIndexOutput; 7900 // TODO: should we verify if fallback is supported? 7901 if (omxNode->getConfig( 7902 (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, 7903 ¶ms, sizeof(params)) == OK) { 7904 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh); 7905 } 7906 } 7907 7908 *caps = builder; 7909 omxNode->freeNode(); 7910 client.disconnect(); 7911 return OK; 7912} 7913 7914// These are supposed be equivalent to the logic in 7915// "audio_channel_out_mask_from_count". 
7916//static 7917status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7918 switch (numChannels) { 7919 case 1: 7920 map[0] = OMX_AUDIO_ChannelCF; 7921 break; 7922 case 2: 7923 map[0] = OMX_AUDIO_ChannelLF; 7924 map[1] = OMX_AUDIO_ChannelRF; 7925 break; 7926 case 3: 7927 map[0] = OMX_AUDIO_ChannelLF; 7928 map[1] = OMX_AUDIO_ChannelRF; 7929 map[2] = OMX_AUDIO_ChannelCF; 7930 break; 7931 case 4: 7932 map[0] = OMX_AUDIO_ChannelLF; 7933 map[1] = OMX_AUDIO_ChannelRF; 7934 map[2] = OMX_AUDIO_ChannelLR; 7935 map[3] = OMX_AUDIO_ChannelRR; 7936 break; 7937 case 5: 7938 map[0] = OMX_AUDIO_ChannelLF; 7939 map[1] = OMX_AUDIO_ChannelRF; 7940 map[2] = OMX_AUDIO_ChannelCF; 7941 map[3] = OMX_AUDIO_ChannelLR; 7942 map[4] = OMX_AUDIO_ChannelRR; 7943 break; 7944 case 6: 7945 map[0] = OMX_AUDIO_ChannelLF; 7946 map[1] = OMX_AUDIO_ChannelRF; 7947 map[2] = OMX_AUDIO_ChannelCF; 7948 map[3] = OMX_AUDIO_ChannelLFE; 7949 map[4] = OMX_AUDIO_ChannelLR; 7950 map[5] = OMX_AUDIO_ChannelRR; 7951 break; 7952 case 7: 7953 map[0] = OMX_AUDIO_ChannelLF; 7954 map[1] = OMX_AUDIO_ChannelRF; 7955 map[2] = OMX_AUDIO_ChannelCF; 7956 map[3] = OMX_AUDIO_ChannelLFE; 7957 map[4] = OMX_AUDIO_ChannelLR; 7958 map[5] = OMX_AUDIO_ChannelRR; 7959 map[6] = OMX_AUDIO_ChannelCS; 7960 break; 7961 case 8: 7962 map[0] = OMX_AUDIO_ChannelLF; 7963 map[1] = OMX_AUDIO_ChannelRF; 7964 map[2] = OMX_AUDIO_ChannelCF; 7965 map[3] = OMX_AUDIO_ChannelLFE; 7966 map[4] = OMX_AUDIO_ChannelLR; 7967 map[5] = OMX_AUDIO_ChannelRR; 7968 map[6] = OMX_AUDIO_ChannelLS; 7969 map[7] = OMX_AUDIO_ChannelRS; 7970 break; 7971 default: 7972 return -EINVAL; 7973 } 7974 7975 return OK; 7976} 7977 7978} // namespace android 7979