ACodec.cpp revision d77a954c1e6c3524ee2b61cba639e24b6d3320f7
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "ACodec"

#ifdef __LP64__
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif

#include <inttypes.h>
#include <utils/Trace.h>

#include <gui/Surface.h>

#include <media/stagefright/ACodec.h>

#include <binder/MemoryDealer.h>

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/foundation/AUtils.h>

#include <media/stagefright/BufferProducerWrapper.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaCodecList.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/OMXClient.h>
#include <media/stagefright/PersistentSurface.h>
#include <media/stagefright/SurfaceUtils.h>
#include <media/hardware/HardwareAPI.h>

#include <OMX_AudioExt.h>
#include <OMX_VideoExt.h>
#include <OMX_Component.h>
#include <OMX_IndexExt.h>
#include <OMX_AsString.h>

#include "include/avc_utils.h"
#include "include/DataConverter.h"
#include "omx/OMXUtils.h"

namespace android {

enum {
    kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
};

// OMX errors are directly mapped into status_t range if
// there is no corresponding MediaError status code.
// Use the statusFromOMXError(int32_t omxError) function.
//
// Currently this is a direct map.
// See frameworks/native/include/media/openmax/OMX_Core.h
//
// Vendor OMX errors     from 0x90000000 - 0x9000FFFF
// Extension OMX errors  from 0x8F000000 - 0x90000000
// Standard OMX errors   from 0x80001000 - 0x80001024 (0x80001024 current)
//

// returns true if err is a recognized OMX error code.
// as OMX error is OMX_S32, this is an int32_t type
static inline bool isOMXError(int32_t err) {
    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
}

// converts an OMX error to a status_t
static inline status_t statusFromOMXError(int32_t omxError) {
    switch (omxError) {
        case OMX_ErrorInvalidComponentName:
        case OMX_ErrorComponentNotFound:
            return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
        default:
            return isOMXError(omxError) ? omxError : 0; // no translation required
    }
}

// checks and converts status_t to a non-side-effect status_t
static inline status_t makeNoSideEffectStatus(status_t err) {
    switch (err) {
        // the following errors have side effects and may come
        // from other code modules. Remap for safety reasons.
        case INVALID_OPERATION:
        case DEAD_OBJECT:
            return UNKNOWN_ERROR;
        default:
            return err;
    }
}

// Refcounted container for a batch of AMessages, so an entire list of OMX
// callbacks can travel through a single notification message.
struct MessageList : public RefBase {
    MessageList() {
    }
    virtual ~MessageList() {
    }
    std::list<sp<AMessage> > &getList() { return mList; }
private:
    std::list<sp<AMessage> > mList;

    DISALLOW_EVIL_CONSTRUCTORS(MessageList);
};

// Returns the process-wide default DataConverter, created lazily and
// exactly once via pthread_once (safe for concurrent first callers).
static sp<DataConverter> getCopyConverter() {
    static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited
    static sp<DataConverter> sCopyConverter; // zero-inited
    pthread_once(&once, [](){ sCopyConverter = new DataConverter(); });
    return sCopyConverter;
}

// Receives OMX callbacks (IOMXObserver) and forwards each batch to the
// ACodec looper as one notification carrying a MessageList.
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    // Sets the message template that is dup()'ed for every forwarded batch.
    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessages(const std::list<omx_message> &messages) {
        if (messages.empty()) {
            return;
        }

        sp<AMessage> notify = mNotify->dup();
        bool first = true;
        sp<MessageList> msgList = new MessageList();
        for (std::list<omx_message>::const_iterator it = messages.cbegin();
              it != messages.cend(); ++it) {
            const omx_message &omx_msg = *it;
            if (first) {
                // the node id is recorded once per batch
                notify->setInt32("node", omx_msg.node);
                first = false;
            }

            sp<AMessage> msg = new AMessage;
            msg->setInt32("type", omx_msg.type);
            switch (omx_msg.type) {
                case omx_message::EVENT:
                {
                    msg->setInt32("event", omx_msg.u.event_data.event);
                    msg->setInt32("data1", omx_msg.u.event_data.data1);
                    msg->setInt32("data2", omx_msg.u.event_data.data2);
                    break;
                }

                case omx_message::EMPTY_BUFFER_DONE:
                {
                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                    msg->setInt32("fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FILL_BUFFER_DONE:
                {
                    msg->setInt32(
                            "buffer", omx_msg.u.extended_buffer_data.buffer);
                    msg->setInt32(
                            "range_offset",
                            omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};

////////////////////////////////////////////////////////////////////////////////

// Common base for every ACodec state; supplies default handling of OMX
// events and of input/output buffer traffic.
struct ACodec::BaseState : public AState {
    BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    // What to do with buffers arriving on a port while in this state.
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};

////////////////////////////////////////////////////////////////////////////////

// Posts the supplied message when the watched binder dies.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};

struct ACodec::UninitializedState : public ACodec::BaseState {
    UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::LoadedState : public ACodec::BaseState {
    LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::ExecutingState : public ACodec::BaseState {
    ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};

////////////////////////////////////////////////////////////////////////////////

struct ACodec::FlushingState : public ACodec::BaseState {
    FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    // one flag per port (input, output)
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};

////////////////////////////////////////////////////////////////////////////////

// Records fenceFd as a write fence; warns if an existing fence is overwritten.
void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = false;
}

// Records fenceFd as a read fence; warns if an existing fence is overwritten.
void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
    if (mFenceFd >= 0) {
        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
    }
    mFenceFd = fenceFd;
    mIsReadFence = true;
}

void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
    if (mFenceFd >= 0 && mIsReadFence) {
        ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
    }
}

void ACodec::BufferInfo::checkReadFence(const char *dbg) {
    if (mFenceFd >= 0 && !mIsReadFence) {
        ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ACodec()
    : mQuirks(0),
      mNode(0),
      mUsingNativeWindow(false),
      mNativeWindowUsageBits(0),
      mIsVideo(false),
      mIsEncoder(false),
      mFatalError(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mInputMetadataType(kMetadataBufferTypeInvalid),
      mOutputMetadataType(kMetadataBufferTypeInvalid),
      mLegacyAdaptiveExperiment(false),
      mMetadataBuffersToSubmit(0),
      mNumUndequeuedBuffers(0),
      mRepeatFrameDelayUs(-1ll),
      mMaxPtsGapUs(-1ll),
      mMaxFps(-1),
      mTimePerFrameUs(-1ll),
      mTimePerCaptureUs(-1ll),
      mCreateInputBuffersSuspended(false),
      mTunneled(false),
      mDescribeColorAspectsIndex((OMX_INDEXTYPE)0),
      mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) {
    // instantiate all states of the state machine up front
    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    changeState(mUninitializedState);
}

ACodec::~ACodec() {
}

void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}

void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(this);
    msg->post();
}

void ACodec::signalSetParameters(const sp<AMessage> &params) {
    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
    msg->setMessage("params", params);
    msg->post();
}

void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent);
    msg->setTarget(this);
    msg->post();
}

void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(this);
    msg->post();
}

// Synchronously requests a surface switch; blocks until the state machine
// responds and returns the resulting status.
status_t ACodec::setSurface(const sp<Surface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
    msg->setObject("surface", surface);

    sp<AMessage> response;
    status_t err = msg->postAndAwaitResponse(&response);

    if (err == OK) {
        (void)response->findInt32("err", &err);
    }
    return err;
}

void ACodec::initiateCreateInputSurface() {
    (new AMessage(kWhatCreateInputSurface, this))->post();
}

void ACodec::initiateSetInputSurface(
        const sp<PersistentSurface> &surface) {
    sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
    msg->setObject("input-surface", surface);
    msg->post();
}

void ACodec::signalEndOfInputStream() {
    (new AMessage(kWhatSignalEndOfInputStream, this))->post();
}

void ACodec::initiateStart() {
    (new AMessage(kWhatStart, this))->post();
}

void ACodec::signalFlush() {
    ALOGV("[%s] signalFlush", mComponentName.c_str());
    (new AMessage(kWhatFlush, this))->post();
}

void ACodec::signalResume() {
    (new AMessage(kWhatResume, this))->post();
}

void ACodec::initiateShutdown(bool keepComponentAllocated) {
    sp<AMessage> msg = new AMessage(kWhatShutdown, this);
    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
    msg->post();
    if (!keepComponentAllocated) {
        // ensure shutdown completes in 3 seconds
        (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
    }
}

void ACodec::signalRequestIDRFrame() {
    (new AMessage(kWhatRequestIDRFrame, this))->post();
}

// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
// Some codecs may return input buffers before having them processed.
// This causes a halt if we already signaled an EOS on the input
// port. For now keep submitting an output buffer if there was an
// EOS on the input port, but not yet on the output port.
void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
            mMetadataBuffersToSubmit > 0) {
        (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
    }
}

// Switches the output surface, possibly while the codec is already running:
// validates that the new surface is compatible (usage bits, undequeued
// buffer count) and migrates the already-allocated output buffers to it.
status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
    // allow keeping unset surface
    if (surface == NULL) {
        if (mNativeWindow != NULL) {
            ALOGW("cannot unset a surface");
            return INVALID_OPERATION;
        }
        return OK;
    }

    // cannot switch from bytebuffers to surface
    if (mNativeWindow == NULL) {
        ALOGW("component was not configured with a surface");
        return INVALID_OPERATION;
    }

    ANativeWindow *nativeWindow = surface.get();
    // if we have not yet started the codec, we can simply set the native window
    if (mBuffers[kPortIndexInput].size() == 0) {
        mNativeWindow = surface;
        return OK;
    }

    // we do not support changing a tunneled surface after start
    if (mTunneled) {
        ALOGW("cannot change tunneled surface");
        return INVALID_OPERATION;
    }

    int usageBits = 0;
    status_t err = setupNativeWindowSizeFormatAndUsage(nativeWindow, &usageBits);
    if (err != OK) {
        return err;
    }

    int ignoredFlags = kVideoGrallocUsage;
    // New output surface is not allowed to add new usage flag except ignored ones.
    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
        return BAD_VALUE;
    }

    // get min undequeued count. We cannot switch to a surface that has a higher
    // undequeued count than we allocated.
    int minUndequeuedBuffers = 0;
    err = nativeWindow->query(
            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &minUndequeuedBuffers);
    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }
    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
                minUndequeuedBuffers, mNumUndequeuedBuffers);
        return BAD_VALUE;
    }

    // we cannot change the number of output buffers while OMX is running
    // set up surface to the same count
    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
    ALOGV("setting up surface for %zu buffers", buffers.size());

    err = native_window_set_buffer_count(nativeWindow, buffers.size());
    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    // need to enable allocation when attaching
    surface->getIGraphicBufferProducer()->allowAllocation(true);

    // for meta data mode, we move dequeued buffers to the new surface.
    // for non-meta mode, we must move all registered buffers
    for (size_t i = 0; i < buffers.size(); ++i) {
        const BufferInfo &info = buffers[i];
        // skip undequeued buffers for meta data mode
        if (storingMetadataInDecodedBuffers()
                && !mLegacyAdaptiveExperiment
                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("skipping buffer");
            continue;
        }
        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());

        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
        if (err != OK) {
            ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
                    info.mGraphicBuffer->getNativeBuffer(),
                    strerror(-err), -err);
            return err;
        }
    }

    // cancel undequeued buffers to new surface
    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            BufferInfo &info = buffers.editItemAt(i);
            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                err = nativeWindow->cancelBuffer(
                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
                info.mFenceFd = -1;
                if (err != OK) {
                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
                            info.mGraphicBuffer->getNativeBuffer(),
                            strerror(-err), -err);
                    return err;
                }
            }
        }
        // disallow further allocation
        (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // push blank buffers to previous window if requested
    if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
        pushBlankBuffersToNativeWindow(mNativeWindow.get());
    }

    mNativeWindow = nativeWindow;
    mNativeWindowUsageBits = usageBits;
    return OK;
}

// Allocates all buffers for the given port: via the native window for
// surface output, otherwise via shared-memory-backed OMX buffers; then
// notifies the client with the resulting port description.
status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);

    CHECK(mDealer[portIndex] == NULL);
    CHECK(mBuffers[portIndex].isEmpty());

    status_t err;
    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
        if (storingMetadataInDecodedBuffers()) {
            err = allocateOutputMetadataBuffers();
        } else {
            err = allocateOutputBuffersFromNativeWindow();
        }
    } else {
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = portIndex;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            MetadataBufferType type =
                portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
            size_t bufSize = def.nBufferSize;
            if (type == kMetadataBufferTypeGrallocSource) {
                bufSize = sizeof(VideoGrallocMetadata);
            } else if (type == kMetadataBufferTypeANWBuffer) {
                bufSize = sizeof(VideoNativeMetadata);
            }

            // If using gralloc or native source input metadata buffers, allocate largest
            // metadata size as we prefer to generate native source metadata, but component
            // may require gralloc source. For camera source, allocate at least enough
            // size for native metadata buffers.
            size_t allottedSize = bufSize;
            if (portIndex == kPortIndexInput && type >= kMetadataBufferTypeGrallocSource) {
                bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
                bufSize = max(bufSize, sizeof(VideoNativeMetadata));
            }

            size_t conversionBufferSize = 0;

            sp<DataConverter> converter = mConverter[portIndex];
            if (converter != NULL) {
                // here we assume sane conversions of max 4:1, so result fits in int32
                if (portIndex == kPortIndexInput) {
                    conversionBufferSize = converter->sourceSize(bufSize);
                } else {
                    conversionBufferSize = converter->targetSize(bufSize);
                }
            }

            size_t alignment = MemoryDealer::getAllocationAlignment();

            ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port",
                    mComponentName.c_str(),
                    def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
                    portIndex == kPortIndexInput ? "input" : "output");

            // verify buffer sizes to avoid overflow in align()
            if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            // don't modify bufSize as OMX may not expect it to increase after negotiation
            size_t alignedSize = align(bufSize, alignment);
            size_t alignedConvSize = align(conversionBufferSize, alignment);
            if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
                ALOGE("b/22885421");
                return NO_MEMORY;
            }

            size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");

            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
                sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
                if (mem == NULL || mem->pointer() == NULL) {
                    return NO_MEMORY;
                }

                BufferInfo info;
                info.mStatus = BufferInfo::OWNED_BY_US;
                info.mFenceFd = -1;
                info.mRenderInfo = NULL;
                info.mNativeHandle = NULL;

                uint32_t requiresAllocateBufferBit =
                    (portIndex == kPortIndexInput)
                        ? kRequiresAllocateBufferOnInputPorts
                        : kRequiresAllocateBufferOnOutputPorts;

                if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
                    mem.clear();

                    void *ptr = NULL;
                    native_handle_t *native_handle = NULL;
                    err = mOMX->allocateSecureBuffer(
                            mNode, portIndex, bufSize, &info.mBufferID,
                            &ptr, &native_handle);

                    // TRICKY: this representation is unorthodox, but ACodec requires
                    // an ABuffer with a proper size to validate range offsets and lengths.
                    // Since mData is never referenced for secure input, it is used to store
                    // either the pointer to the secure buffer, or the opaque handle as on
                    // some devices ptr is actually an opaque handle, not a pointer.

                    // TRICKY2: use native handle as the base of the ABuffer if received one,
                    // because Widevine source only receives these base addresses.
                    info.mData = new ABuffer(ptr != NULL ? ptr : (void *)native_handle, bufSize);
                    info.mNativeHandle = NativeHandle::create(native_handle, true /* ownsHandle */);
                    info.mCodecData = info.mData;
                } else if (mQuirks & requiresAllocateBufferBit) {
                    err = mOMX->allocateBufferWithBackup(
                            mNode, portIndex, mem, &info.mBufferID, allottedSize);
                } else {
                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
                }

                if (mem != NULL) {
                    info.mCodecData = new ABuffer(mem->pointer(), bufSize);
                    info.mCodecRef = mem;

                    if (type == kMetadataBufferTypeANWBuffer) {
                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
                    }

                    // if we require conversion, allocate conversion buffer for client use;
                    // otherwise, reuse codec buffer
                    if (mConverter[portIndex] != NULL) {
                        CHECK_GT(conversionBufferSize, (size_t)0);
                        mem = mDealer[portIndex]->allocate(conversionBufferSize);
                        if (mem == NULL || mem->pointer() == NULL) {
                            return NO_MEMORY;
                        }
                        info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
                        info.mMemRef = mem;
                    } else {
                        info.mData = info.mCodecData;
                        info.mMemRef = info.mCodecRef;
                    }
                }

                mBuffers[portIndex].push(info);
            }
        }
    }

    if (err != OK) {
        return err;
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);

    notify->setInt32("portIndex", portIndex);

    sp<PortDescription> desc = new PortDescription;

    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
        const BufferInfo &info = mBuffers[portIndex][i];
        desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
    }

    notify->setObject("portDesc", desc);
    notify->post();

    return OK;
}

status_t
ACodec::setupNativeWindowSizeFormatAndUsage(
        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_U32 usage = 0;
    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
    if (err != 0) {
        ALOGW("querying usage flags from OMX IL component failed: %d", err);
        // XXX: Currently this error is logged, but not fatal.
        usage = 0;
    }
    int omxUsage = usage;

    if (mFlags & kFlagIsGrallocUsageProtected) {
        usage |= GRALLOC_USAGE_PROTECTED;
    }

    usage |= kVideoGrallocUsage;
    *finalUsage = usage;

    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
    return setNativeWindowSizeFormatAndUsage(
            nativeWindow,
            def.format.video.nFrameWidth,
            def.format.video.nFrameHeight,
            def.format.video.eColorFormat,
            mRotationDegrees,
            usage);
}

// Negotiates the output buffer count between the OMX component and the
// native window and configures the window accordingly; returns the final
// counts/sizes through the out-parameters.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        err = setupNativeWindowSizeFormatAndUsage(mNativeWindow.get(), &mNativeWindowUsageBits);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize = def.nBufferSize;
    return err;
}

// Dequeues buffers from the native window and hands them to the OMX
// component on the output port.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf,
&fenceFd); 1094 if (err != 0) { 1095 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1096 break; 1097 } 1098 1099 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1100 BufferInfo info; 1101 info.mStatus = BufferInfo::OWNED_BY_US; 1102 info.mFenceFd = fenceFd; 1103 info.mIsReadFence = false; 1104 info.mRenderInfo = NULL; 1105 info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */); 1106 info.mCodecData = info.mData; 1107 info.mGraphicBuffer = graphicBuffer; 1108 mBuffers[kPortIndexOutput].push(info); 1109 1110 IOMX::buffer_id bufferId; 1111 err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer, 1112 &bufferId); 1113 if (err != 0) { 1114 ALOGE("registering GraphicBuffer %u with OMX IL component failed: " 1115 "%d", i, err); 1116 break; 1117 } 1118 1119 mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId; 1120 1121 ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)", 1122 mComponentName.c_str(), 1123 bufferId, graphicBuffer.get()); 1124 } 1125 1126 OMX_U32 cancelStart; 1127 OMX_U32 cancelEnd; 1128 1129 if (err != 0) { 1130 // If an error occurred while dequeuing we need to cancel any buffers 1131 // that were dequeued. 1132 cancelStart = 0; 1133 cancelEnd = mBuffers[kPortIndexOutput].size(); 1134 } else { 1135 // Return the required minimum undequeued buffers to the native window. 
1136 cancelStart = bufferCount - minUndequeuedBuffers; 1137 cancelEnd = bufferCount; 1138 } 1139 1140 for (OMX_U32 i = cancelStart; i < cancelEnd; i++) { 1141 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1142 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1143 status_t error = cancelBufferToNativeWindow(info); 1144 if (err == 0) { 1145 err = error; 1146 } 1147 } 1148 } 1149 1150 if (!storingMetadataInDecodedBuffers()) { 1151 static_cast<Surface*>(mNativeWindow.get()) 1152 ->getIGraphicBufferProducer()->allowAllocation(false); 1153 } 1154 1155 return err; 1156} 1157 1158status_t ACodec::allocateOutputMetadataBuffers() { 1159 OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers; 1160 status_t err = configureOutputBuffersFromNativeWindow( 1161 &bufferCount, &bufferSize, &minUndequeuedBuffers); 1162 if (err != 0) 1163 return err; 1164 mNumUndequeuedBuffers = minUndequeuedBuffers; 1165 1166 ALOGV("[%s] Allocating %u meta buffers on output port", 1167 mComponentName.c_str(), bufferCount); 1168 1169 size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ? 
1170 sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata); 1171 size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment()); 1172 mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec"); 1173 1174 // Dequeue buffers and send them to OMX 1175 for (OMX_U32 i = 0; i < bufferCount; i++) { 1176 BufferInfo info; 1177 info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1178 info.mFenceFd = -1; 1179 info.mRenderInfo = NULL; 1180 info.mGraphicBuffer = NULL; 1181 info.mDequeuedAt = mDequeueCounter; 1182 1183 sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize); 1184 if (mem == NULL || mem->pointer() == NULL) { 1185 return NO_MEMORY; 1186 } 1187 if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1188 ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1; 1189 } 1190 info.mData = new ABuffer(mem->pointer(), mem->size()); 1191 info.mMemRef = mem; 1192 info.mCodecData = info.mData; 1193 info.mCodecRef = mem; 1194 1195 // we use useBuffer for metadata regardless of quirks 1196 err = mOMX->useBuffer( 1197 mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size()); 1198 mBuffers[kPortIndexOutput].push(info); 1199 1200 ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)", 1201 mComponentName.c_str(), info.mBufferID, mem->pointer()); 1202 } 1203 1204 if (mLegacyAdaptiveExperiment) { 1205 // preallocate and preregister buffers 1206 static_cast<Surface *>(mNativeWindow.get()) 1207 ->getIGraphicBufferProducer()->allowAllocation(true); 1208 1209 ALOGV("[%s] Allocating %u buffers from a native window of size %u on " 1210 "output port", 1211 mComponentName.c_str(), bufferCount, bufferSize); 1212 1213 // Dequeue buffers then cancel them all 1214 for (OMX_U32 i = 0; i < bufferCount; i++) { 1215 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1216 1217 ANativeWindowBuffer *buf; 1218 int fenceFd; 1219 err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1220 if (err != 0) { 
1221 ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err); 1222 break; 1223 } 1224 1225 sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false)); 1226 mOMX->updateGraphicBufferInMeta( 1227 mNode, kPortIndexOutput, graphicBuffer, info->mBufferID); 1228 info->mStatus = BufferInfo::OWNED_BY_US; 1229 info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy"); 1230 info->mGraphicBuffer = graphicBuffer; 1231 } 1232 1233 for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) { 1234 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1235 if (info->mStatus == BufferInfo::OWNED_BY_US) { 1236 status_t error = cancelBufferToNativeWindow(info); 1237 if (err == OK) { 1238 err = error; 1239 } 1240 } 1241 } 1242 1243 static_cast<Surface*>(mNativeWindow.get()) 1244 ->getIGraphicBufferProducer()->allowAllocation(false); 1245 } 1246 1247 mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers; 1248 return err; 1249} 1250 1251status_t ACodec::submitOutputMetadataBuffer() { 1252 CHECK(storingMetadataInDecodedBuffers()); 1253 if (mMetadataBuffersToSubmit == 0) 1254 return OK; 1255 1256 BufferInfo *info = dequeueBufferFromNativeWindow(); 1257 if (info == NULL) { 1258 return ERROR_IO; 1259 } 1260 1261 ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p", 1262 mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get()); 1263 1264 --mMetadataBuffersToSubmit; 1265 info->checkWriteFence("submitOutputMetadataBuffer"); 1266 status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd); 1267 info->mFenceFd = -1; 1268 if (err == OK) { 1269 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 1270 } 1271 1272 return err; 1273} 1274 1275status_t ACodec::waitForFence(int fd, const char *dbg ) { 1276 status_t res = OK; 1277 if (fd >= 0) { 1278 sp<Fence> fence = new Fence(fd); 1279 res = fence->wait(IOMX::kFenceTimeoutMs); 1280 ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg); 1281 } 1282 return res; 1283} 
1284 1285// static 1286const char *ACodec::_asString(BufferInfo::Status s) { 1287 switch (s) { 1288 case BufferInfo::OWNED_BY_US: return "OUR"; 1289 case BufferInfo::OWNED_BY_COMPONENT: return "COMPONENT"; 1290 case BufferInfo::OWNED_BY_UPSTREAM: return "UPSTREAM"; 1291 case BufferInfo::OWNED_BY_DOWNSTREAM: return "DOWNSTREAM"; 1292 case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE"; 1293 case BufferInfo::UNRECOGNIZED: return "UNRECOGNIZED"; 1294 default: return "?"; 1295 } 1296} 1297 1298void ACodec::dumpBuffers(OMX_U32 portIndex) { 1299 CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput); 1300 ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(), 1301 portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size()); 1302 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1303 const BufferInfo &info = mBuffers[portIndex][i]; 1304 ALOGI(" slot %2zu: #%8u %p/%p %s(%d) dequeued:%u", 1305 i, info.mBufferID, info.mGraphicBuffer.get(), 1306 info.mGraphicBuffer == NULL ? 
NULL : info.mGraphicBuffer->getNativeBuffer(), 1307 _asString(info.mStatus), info.mStatus, info.mDequeuedAt); 1308 } 1309} 1310 1311status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) { 1312 CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US); 1313 1314 ALOGV("[%s] Calling cancelBuffer on buffer %u", 1315 mComponentName.c_str(), info->mBufferID); 1316 1317 info->checkWriteFence("cancelBufferToNativeWindow"); 1318 int err = mNativeWindow->cancelBuffer( 1319 mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 1320 info->mFenceFd = -1; 1321 1322 ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window", 1323 mComponentName.c_str(), info->mBufferID); 1324 // change ownership even if cancelBuffer fails 1325 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 1326 1327 return err; 1328} 1329 1330void ACodec::updateRenderInfoForDequeuedBuffer( 1331 ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) { 1332 1333 info->mRenderInfo = 1334 mRenderTracker.updateInfoForDequeuedBuffer( 1335 buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]); 1336 1337 // check for any fences already signaled 1338 notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo); 1339} 1340 1341void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) { 1342 if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) { 1343 mRenderTracker.dumpRenderQueue(); 1344 } 1345} 1346 1347void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) { 1348 sp<AMessage> msg = mNotify->dup(); 1349 msg->setInt32("what", CodecBase::kWhatOutputFramesRendered); 1350 std::list<FrameRenderTracker::Info> done = 1351 mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete); 1352 1353 // unlink untracked frames 1354 for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin(); 1355 it != done.cend(); ++it) { 1356 ssize_t index = it->getIndex(); 1357 if (index >= 0 && (size_t)index < 
mBuffers[kPortIndexOutput].size()) { 1358 mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL; 1359 } else if (index >= 0) { 1360 // THIS SHOULD NEVER HAPPEN 1361 ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size()); 1362 } 1363 } 1364 1365 if (MediaCodec::CreateFramesRenderedMessage(done, msg)) { 1366 msg->post(); 1367 } 1368} 1369 1370ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() { 1371 ANativeWindowBuffer *buf; 1372 CHECK(mNativeWindow.get() != NULL); 1373 1374 if (mTunneled) { 1375 ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel" 1376 " video playback mode mode!"); 1377 return NULL; 1378 } 1379 1380 if (mFatalError) { 1381 ALOGW("not dequeuing from native window due to fatal error"); 1382 return NULL; 1383 } 1384 1385 int fenceFd = -1; 1386 do { 1387 status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd); 1388 if (err != 0) { 1389 ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err); 1390 return NULL; 1391 } 1392 1393 bool stale = false; 1394 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1395 i--; 1396 BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i); 1397 1398 if (info->mGraphicBuffer != NULL && 1399 info->mGraphicBuffer->handle == buf->handle) { 1400 // Since consumers can attach buffers to BufferQueues, it is possible 1401 // that a known yet stale buffer can return from a surface that we 1402 // once used. We can simply ignore this as we have already dequeued 1403 // this buffer properly. NOTE: this does not eliminate all cases, 1404 // e.g. it is possible that we have queued the valid buffer to the 1405 // NW, and a stale copy of the same buffer gets dequeued - which will 1406 // be treated as the valid buffer by ACodec. 1407 if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 1408 ALOGI("dequeued stale buffer %p. 
discarding", buf); 1409 stale = true; 1410 break; 1411 } 1412 1413 ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer()); 1414 info->mStatus = BufferInfo::OWNED_BY_US; 1415 info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow"); 1416 updateRenderInfoForDequeuedBuffer(buf, fenceFd, info); 1417 return info; 1418 } 1419 } 1420 1421 // It is also possible to receive a previously unregistered buffer 1422 // in non-meta mode. These should be treated as stale buffers. The 1423 // same is possible in meta mode, in which case, it will be treated 1424 // as a normal buffer, which is not desirable. 1425 // TODO: fix this. 1426 if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) { 1427 ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf); 1428 stale = true; 1429 } 1430 if (stale) { 1431 // TODO: detach stale buffer, but there is no API yet to do it. 1432 buf = NULL; 1433 } 1434 } while (buf == NULL); 1435 1436 // get oldest undequeued buffer 1437 BufferInfo *oldest = NULL; 1438 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1439 i--; 1440 BufferInfo *info = 1441 &mBuffers[kPortIndexOutput].editItemAt(i); 1442 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW && 1443 (oldest == NULL || 1444 // avoid potential issues from counter rolling over 1445 mDequeueCounter - info->mDequeuedAt > 1446 mDequeueCounter - oldest->mDequeuedAt)) { 1447 oldest = info; 1448 } 1449 } 1450 1451 // it is impossible dequeue a buffer when there are no buffers with ANW 1452 CHECK(oldest != NULL); 1453 // it is impossible to dequeue an unknown buffer in non-meta mode, as the 1454 // while loop above does not complete 1455 CHECK(storingMetadataInDecodedBuffers()); 1456 1457 // discard buffer in LRU info and replace with new buffer 1458 oldest->mGraphicBuffer = new GraphicBuffer(buf, false); 1459 oldest->mStatus = BufferInfo::OWNED_BY_US; 1460 oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest"); 1461 
mRenderTracker.untrackFrame(oldest->mRenderInfo); 1462 oldest->mRenderInfo = NULL; 1463 1464 mOMX->updateGraphicBufferInMeta( 1465 mNode, kPortIndexOutput, oldest->mGraphicBuffer, 1466 oldest->mBufferID); 1467 1468 if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) { 1469 VideoGrallocMetadata *grallocMeta = 1470 reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base()); 1471 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1472 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1473 mDequeueCounter - oldest->mDequeuedAt, 1474 (void *)(uintptr_t)grallocMeta->pHandle, 1475 oldest->mGraphicBuffer->handle, oldest->mData->base()); 1476 } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) { 1477 VideoNativeMetadata *nativeMeta = 1478 reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base()); 1479 ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)", 1480 (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]), 1481 mDequeueCounter - oldest->mDequeuedAt, 1482 (void *)(uintptr_t)nativeMeta->pBuffer, 1483 oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base()); 1484 } 1485 1486 updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest); 1487 return oldest; 1488} 1489 1490status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) { 1491 status_t err = OK; 1492 for (size_t i = mBuffers[portIndex].size(); i > 0;) { 1493 i--; 1494 status_t err2 = freeBuffer(portIndex, i); 1495 if (err == OK) { 1496 err = err2; 1497 } 1498 } 1499 1500 // clear mDealer even on an error 1501 mDealer[portIndex].clear(); 1502 return err; 1503} 1504 1505status_t ACodec::freeOutputBuffersNotOwnedByComponent() { 1506 status_t err = OK; 1507 for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) { 1508 i--; 1509 BufferInfo *info = 1510 &mBuffers[kPortIndexOutput].editItemAt(i); 1511 1512 // At this time some buffers may still be with the component 1513 // or being drained. 
1514 if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT && 1515 info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) { 1516 status_t err2 = freeBuffer(kPortIndexOutput, i); 1517 if (err == OK) { 1518 err = err2; 1519 } 1520 } 1521 } 1522 1523 return err; 1524} 1525 1526status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) { 1527 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 1528 status_t err = OK; 1529 1530 // there should not be any fences in the metadata 1531 MetadataBufferType type = 1532 portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType; 1533 if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL 1534 && info->mData->size() >= sizeof(VideoNativeMetadata)) { 1535 int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd; 1536 if (fenceFd >= 0) { 1537 ALOGW("unreleased fence (%d) in %s metadata buffer %zu", 1538 fenceFd, portIndex == kPortIndexInput ? "input" : "output", i); 1539 } 1540 } 1541 1542 switch (info->mStatus) { 1543 case BufferInfo::OWNED_BY_US: 1544 if (portIndex == kPortIndexOutput && mNativeWindow != NULL) { 1545 (void)cancelBufferToNativeWindow(info); 1546 } 1547 // fall through 1548 1549 case BufferInfo::OWNED_BY_NATIVE_WINDOW: 1550 err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID); 1551 break; 1552 1553 default: 1554 ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus); 1555 err = FAILED_TRANSACTION; 1556 break; 1557 } 1558 1559 if (info->mFenceFd >= 0) { 1560 ::close(info->mFenceFd); 1561 } 1562 1563 if (portIndex == kPortIndexOutput) { 1564 mRenderTracker.untrackFrame(info->mRenderInfo, i); 1565 info->mRenderInfo = NULL; 1566 } 1567 1568 // remove buffer even if mOMX->freeBuffer fails 1569 mBuffers[portIndex].removeAt(i); 1570 return err; 1571} 1572 1573ACodec::BufferInfo *ACodec::findBufferByID( 1574 uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) { 1575 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 1576 BufferInfo *info = 
&mBuffers[portIndex].editItemAt(i); 1577 1578 if (info->mBufferID == bufferID) { 1579 if (index != NULL) { 1580 *index = i; 1581 } 1582 return info; 1583 } 1584 } 1585 1586 ALOGE("Could not find buffer with ID %u", bufferID); 1587 return NULL; 1588} 1589 1590status_t ACodec::setComponentRole( 1591 bool isEncoder, const char *mime) { 1592 const char *role = getComponentRole(isEncoder, mime); 1593 if (role == NULL) { 1594 return BAD_VALUE; 1595 } 1596 status_t err = setComponentRole(mOMX, mNode, role); 1597 if (err != OK) { 1598 ALOGW("[%s] Failed to set standard component role '%s'.", 1599 mComponentName.c_str(), role); 1600 } 1601 return err; 1602} 1603 1604//static 1605const char *ACodec::getComponentRole( 1606 bool isEncoder, const char *mime) { 1607 struct MimeToRole { 1608 const char *mime; 1609 const char *decoderRole; 1610 const char *encoderRole; 1611 }; 1612 1613 static const MimeToRole kMimeToRole[] = { 1614 { MEDIA_MIMETYPE_AUDIO_MPEG, 1615 "audio_decoder.mp3", "audio_encoder.mp3" }, 1616 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I, 1617 "audio_decoder.mp1", "audio_encoder.mp1" }, 1618 { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II, 1619 "audio_decoder.mp2", "audio_encoder.mp2" }, 1620 { MEDIA_MIMETYPE_AUDIO_AMR_NB, 1621 "audio_decoder.amrnb", "audio_encoder.amrnb" }, 1622 { MEDIA_MIMETYPE_AUDIO_AMR_WB, 1623 "audio_decoder.amrwb", "audio_encoder.amrwb" }, 1624 { MEDIA_MIMETYPE_AUDIO_AAC, 1625 "audio_decoder.aac", "audio_encoder.aac" }, 1626 { MEDIA_MIMETYPE_AUDIO_VORBIS, 1627 "audio_decoder.vorbis", "audio_encoder.vorbis" }, 1628 { MEDIA_MIMETYPE_AUDIO_OPUS, 1629 "audio_decoder.opus", "audio_encoder.opus" }, 1630 { MEDIA_MIMETYPE_AUDIO_G711_MLAW, 1631 "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" }, 1632 { MEDIA_MIMETYPE_AUDIO_G711_ALAW, 1633 "audio_decoder.g711alaw", "audio_encoder.g711alaw" }, 1634 { MEDIA_MIMETYPE_VIDEO_AVC, 1635 "video_decoder.avc", "video_encoder.avc" }, 1636 { MEDIA_MIMETYPE_VIDEO_HEVC, 1637 "video_decoder.hevc", "video_encoder.hevc" }, 1638 { 
MEDIA_MIMETYPE_VIDEO_MPEG4, 1639 "video_decoder.mpeg4", "video_encoder.mpeg4" }, 1640 { MEDIA_MIMETYPE_VIDEO_H263, 1641 "video_decoder.h263", "video_encoder.h263" }, 1642 { MEDIA_MIMETYPE_VIDEO_VP8, 1643 "video_decoder.vp8", "video_encoder.vp8" }, 1644 { MEDIA_MIMETYPE_VIDEO_VP9, 1645 "video_decoder.vp9", "video_encoder.vp9" }, 1646 { MEDIA_MIMETYPE_AUDIO_RAW, 1647 "audio_decoder.raw", "audio_encoder.raw" }, 1648 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, 1649 "video_decoder.dolby-vision", "video_encoder.dolby-vision" }, 1650 { MEDIA_MIMETYPE_AUDIO_FLAC, 1651 "audio_decoder.flac", "audio_encoder.flac" }, 1652 { MEDIA_MIMETYPE_AUDIO_MSGSM, 1653 "audio_decoder.gsm", "audio_encoder.gsm" }, 1654 { MEDIA_MIMETYPE_VIDEO_MPEG2, 1655 "video_decoder.mpeg2", "video_encoder.mpeg2" }, 1656 { MEDIA_MIMETYPE_AUDIO_AC3, 1657 "audio_decoder.ac3", "audio_encoder.ac3" }, 1658 { MEDIA_MIMETYPE_AUDIO_EAC3, 1659 "audio_decoder.eac3", "audio_encoder.eac3" }, 1660 }; 1661 1662 static const size_t kNumMimeToRole = 1663 sizeof(kMimeToRole) / sizeof(kMimeToRole[0]); 1664 1665 size_t i; 1666 for (i = 0; i < kNumMimeToRole; ++i) { 1667 if (!strcasecmp(mime, kMimeToRole[i].mime)) { 1668 break; 1669 } 1670 } 1671 1672 if (i == kNumMimeToRole) { 1673 return NULL; 1674 } 1675 1676 return isEncoder ? 
kMimeToRole[i].encoderRole 1677 : kMimeToRole[i].decoderRole; 1678} 1679 1680//static 1681status_t ACodec::setComponentRole( 1682 const sp<IOMX> &omx, IOMX::node_id node, const char *role) { 1683 OMX_PARAM_COMPONENTROLETYPE roleParams; 1684 InitOMXParams(&roleParams); 1685 1686 strncpy((char *)roleParams.cRole, 1687 role, OMX_MAX_STRINGNAME_SIZE - 1); 1688 1689 roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0'; 1690 1691 return omx->setParameter( 1692 node, OMX_IndexParamStandardComponentRole, 1693 &roleParams, sizeof(roleParams)); 1694} 1695 1696status_t ACodec::configureCodec( 1697 const char *mime, const sp<AMessage> &msg) { 1698 int32_t encoder; 1699 if (!msg->findInt32("encoder", &encoder)) { 1700 encoder = false; 1701 } 1702 1703 sp<AMessage> inputFormat = new AMessage; 1704 sp<AMessage> outputFormat = new AMessage; 1705 mConfigFormat = msg; 1706 1707 mIsEncoder = encoder; 1708 1709 mInputMetadataType = kMetadataBufferTypeInvalid; 1710 mOutputMetadataType = kMetadataBufferTypeInvalid; 1711 1712 status_t err = setComponentRole(encoder /* isEncoder */, mime); 1713 1714 if (err != OK) { 1715 return err; 1716 } 1717 1718 int32_t bitRate = 0; 1719 // FLAC encoder doesn't need a bitrate, other encoders do 1720 if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC) 1721 && !msg->findInt32("bitrate", &bitRate)) { 1722 return INVALID_OPERATION; 1723 } 1724 1725 int32_t storeMeta; 1726 if (encoder 1727 && msg->findInt32("store-metadata-in-buffers", &storeMeta) 1728 && storeMeta != 0) { 1729 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType); 1730 if (err != OK) { 1731 ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d", 1732 mComponentName.c_str(), err); 1733 1734 return err; 1735 } 1736 // For this specific case we could be using camera source even if storeMetaDataInBuffers 1737 // returns Gralloc source. Pretend that we are; this will force us to use nBufferSize. 
1738 if (mInputMetadataType == kMetadataBufferTypeGrallocSource) { 1739 mInputMetadataType = kMetadataBufferTypeCameraSource; 1740 } 1741 1742 uint32_t usageBits; 1743 if (mOMX->getParameter( 1744 mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 1745 &usageBits, sizeof(usageBits)) == OK) { 1746 inputFormat->setInt32( 1747 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 1748 } 1749 } 1750 1751 int32_t prependSPSPPS = 0; 1752 if (encoder 1753 && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS) 1754 && prependSPSPPS != 0) { 1755 OMX_INDEXTYPE index; 1756 err = mOMX->getExtensionIndex( 1757 mNode, 1758 "OMX.google.android.index.prependSPSPPSToIDRFrames", 1759 &index); 1760 1761 if (err == OK) { 1762 PrependSPSPPSToIDRFramesParams params; 1763 InitOMXParams(¶ms); 1764 params.bEnable = OMX_TRUE; 1765 1766 err = mOMX->setParameter( 1767 mNode, index, ¶ms, sizeof(params)); 1768 } 1769 1770 if (err != OK) { 1771 ALOGE("Encoder could not be configured to emit SPS/PPS before " 1772 "IDR frames. (err %d)", err); 1773 1774 return err; 1775 } 1776 } 1777 1778 // Only enable metadata mode on encoder output if encoder can prepend 1779 // sps/pps to idr frames, since in metadata mode the bitstream is in an 1780 // opaque handle, to which we don't have access. 
1781 int32_t video = !strncasecmp(mime, "video/", 6); 1782 mIsVideo = video; 1783 if (encoder && video) { 1784 OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS 1785 && msg->findInt32("store-metadata-in-buffers-output", &storeMeta) 1786 && storeMeta != 0); 1787 1788 err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType); 1789 if (err != OK) { 1790 ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d", 1791 mComponentName.c_str(), err); 1792 } 1793 1794 if (!msg->findInt64( 1795 "repeat-previous-frame-after", 1796 &mRepeatFrameDelayUs)) { 1797 mRepeatFrameDelayUs = -1ll; 1798 } 1799 1800 if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) { 1801 mMaxPtsGapUs = -1ll; 1802 } 1803 1804 if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) { 1805 mMaxFps = -1; 1806 } 1807 1808 if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) { 1809 mTimePerCaptureUs = -1ll; 1810 } 1811 1812 if (!msg->findInt32( 1813 "create-input-buffers-suspended", 1814 (int32_t*)&mCreateInputBuffersSuspended)) { 1815 mCreateInputBuffersSuspended = false; 1816 } 1817 } 1818 1819 // NOTE: we only use native window for video decoders 1820 sp<RefBase> obj; 1821 bool haveNativeWindow = msg->findObject("native-window", &obj) 1822 && obj != NULL && video && !encoder; 1823 mUsingNativeWindow = haveNativeWindow; 1824 mLegacyAdaptiveExperiment = false; 1825 if (video && !encoder) { 1826 inputFormat->setInt32("adaptive-playback", false); 1827 1828 int32_t usageProtected; 1829 if (msg->findInt32("protected", &usageProtected) && usageProtected) { 1830 if (!haveNativeWindow) { 1831 ALOGE("protected output buffers must be sent to an ANativeWindow"); 1832 return PERMISSION_DENIED; 1833 } 1834 mFlags |= kFlagIsGrallocUsageProtected; 1835 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1836 } 1837 1838 if (mFlags & kFlagIsSecure) { 1839 // use native_handles for secure input buffers 1840 err = mOMX->enableNativeBuffers( 1841 mNode, kPortIndexInput, OMX_FALSE 
/* graphic */, OMX_TRUE); 1842 ALOGI_IF(err != OK, "falling back to non-native_handles"); 1843 err = OK; // ignore error for now 1844 } 1845 } 1846 if (haveNativeWindow) { 1847 sp<ANativeWindow> nativeWindow = 1848 static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get())); 1849 1850 // START of temporary support for automatic FRC - THIS WILL BE REMOVED 1851 int32_t autoFrc; 1852 if (msg->findInt32("auto-frc", &autoFrc)) { 1853 bool enabled = autoFrc; 1854 OMX_CONFIG_BOOLEANTYPE config; 1855 InitOMXParams(&config); 1856 config.bEnabled = (OMX_BOOL)enabled; 1857 status_t temp = mOMX->setConfig( 1858 mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion, 1859 &config, sizeof(config)); 1860 if (temp == OK) { 1861 outputFormat->setInt32("auto-frc", enabled); 1862 } else if (enabled) { 1863 ALOGI("codec does not support requested auto-frc (err %d)", temp); 1864 } 1865 } 1866 // END of temporary support for automatic FRC 1867 1868 int32_t tunneled; 1869 if (msg->findInt32("feature-tunneled-playback", &tunneled) && 1870 tunneled != 0) { 1871 ALOGI("Configuring TUNNELED video playback."); 1872 mTunneled = true; 1873 1874 int32_t audioHwSync = 0; 1875 if (!msg->findInt32("audio-hw-sync", &audioHwSync)) { 1876 ALOGW("No Audio HW Sync provided for video tunnel"); 1877 } 1878 err = configureTunneledVideoPlayback(audioHwSync, nativeWindow); 1879 if (err != OK) { 1880 ALOGE("configureTunneledVideoPlayback(%d,%p) failed!", 1881 audioHwSync, nativeWindow.get()); 1882 return err; 1883 } 1884 1885 int32_t maxWidth = 0, maxHeight = 0; 1886 if (msg->findInt32("max-width", &maxWidth) && 1887 msg->findInt32("max-height", &maxHeight)) { 1888 1889 err = mOMX->prepareForAdaptivePlayback( 1890 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight); 1891 if (err != OK) { 1892 ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d", 1893 mComponentName.c_str(), err); 1894 // allow failure 1895 err = OK; 1896 } else { 1897 inputFormat->setInt32("max-width", maxWidth); 1898 
inputFormat->setInt32("max-height", maxHeight); 1899 inputFormat->setInt32("adaptive-playback", true); 1900 } 1901 } 1902 } else { 1903 ALOGV("Configuring CPU controlled video playback."); 1904 mTunneled = false; 1905 1906 // Explicity reset the sideband handle of the window for 1907 // non-tunneled video in case the window was previously used 1908 // for a tunneled video playback. 1909 err = native_window_set_sideband_stream(nativeWindow.get(), NULL); 1910 if (err != OK) { 1911 ALOGE("set_sideband_stream(NULL) failed! (err %d).", err); 1912 return err; 1913 } 1914 1915 // Always try to enable dynamic output buffers on native surface 1916 err = mOMX->storeMetaDataInBuffers( 1917 mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType); 1918 if (err != OK) { 1919 ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d", 1920 mComponentName.c_str(), err); 1921 1922 // if adaptive playback has been requested, try JB fallback 1923 // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS 1924 // LARGE MEMORY REQUIREMENT 1925 1926 // we will not do adaptive playback on software accessed 1927 // surfaces as they never had to respond to changes in the 1928 // crop window, and we don't trust that they will be able to. 
1929 int usageBits = 0; 1930 bool canDoAdaptivePlayback; 1931 1932 if (nativeWindow->query( 1933 nativeWindow.get(), 1934 NATIVE_WINDOW_CONSUMER_USAGE_BITS, 1935 &usageBits) != OK) { 1936 canDoAdaptivePlayback = false; 1937 } else { 1938 canDoAdaptivePlayback = 1939 (usageBits & 1940 (GRALLOC_USAGE_SW_READ_MASK | 1941 GRALLOC_USAGE_SW_WRITE_MASK)) == 0; 1942 } 1943 1944 int32_t maxWidth = 0, maxHeight = 0; 1945 if (canDoAdaptivePlayback && 1946 msg->findInt32("max-width", &maxWidth) && 1947 msg->findInt32("max-height", &maxHeight)) { 1948 ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)", 1949 mComponentName.c_str(), maxWidth, maxHeight); 1950 1951 err = mOMX->prepareForAdaptivePlayback( 1952 mNode, kPortIndexOutput, OMX_TRUE, maxWidth, 1953 maxHeight); 1954 ALOGW_IF(err != OK, 1955 "[%s] prepareForAdaptivePlayback failed w/ err %d", 1956 mComponentName.c_str(), err); 1957 1958 if (err == OK) { 1959 inputFormat->setInt32("max-width", maxWidth); 1960 inputFormat->setInt32("max-height", maxHeight); 1961 inputFormat->setInt32("adaptive-playback", true); 1962 } 1963 } 1964 // allow failure 1965 err = OK; 1966 } else { 1967 ALOGV("[%s] storeMetaDataInBuffers succeeded", 1968 mComponentName.c_str()); 1969 CHECK(storingMetadataInDecodedBuffers()); 1970 mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled( 1971 "legacy-adaptive", !msg->contains("no-experiments")); 1972 1973 inputFormat->setInt32("adaptive-playback", true); 1974 } 1975 1976 int32_t push; 1977 if (msg->findInt32("push-blank-buffers-on-shutdown", &push) 1978 && push != 0) { 1979 mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 1980 } 1981 } 1982 1983 int32_t rotationDegrees; 1984 if (msg->findInt32("rotation-degrees", &rotationDegrees)) { 1985 mRotationDegrees = rotationDegrees; 1986 } else { 1987 mRotationDegrees = 0; 1988 } 1989 } 1990 1991 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 1992 (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 1993 // invalid encodings will default to 
PCM-16bit in setupRawAudioFormat. 1994 1995 if (video) { 1996 // determine need for software renderer 1997 bool usingSwRenderer = false; 1998 if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) { 1999 usingSwRenderer = true; 2000 haveNativeWindow = false; 2001 } 2002 2003 if (encoder) { 2004 err = setupVideoEncoder(mime, msg, outputFormat, inputFormat); 2005 } else { 2006 err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2007 } 2008 2009 if (err != OK) { 2010 return err; 2011 } 2012 2013 if (haveNativeWindow) { 2014 mNativeWindow = static_cast<Surface *>(obj.get()); 2015 } 2016 2017 // initialize native window now to get actual output format 2018 // TODO: this is needed for some encoders even though they don't use native window 2019 err = initNativeWindow(); 2020 if (err != OK) { 2021 return err; 2022 } 2023 2024 // fallback for devices that do not handle flex-YUV for native buffers 2025 if (haveNativeWindow) { 2026 int32_t requestedColorFormat = OMX_COLOR_FormatUnused; 2027 if (msg->findInt32("color-format", &requestedColorFormat) && 2028 requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) { 2029 status_t err = getPortFormat(kPortIndexOutput, outputFormat); 2030 if (err != OK) { 2031 return err; 2032 } 2033 int32_t colorFormat = OMX_COLOR_FormatUnused; 2034 OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused; 2035 if (!outputFormat->findInt32("color-format", &colorFormat)) { 2036 ALOGE("ouptut port did not have a color format (wrong domain?)"); 2037 return BAD_VALUE; 2038 } 2039 ALOGD("[%s] Requested output format %#x and got %#x.", 2040 mComponentName.c_str(), requestedColorFormat, colorFormat); 2041 if (!isFlexibleColorFormat( 2042 mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent) 2043 || flexibleEquivalent != (OMX_U32)requestedColorFormat) { 2044 // device did not handle flex-YUV request for native window, fall back 2045 // to SW renderer 2046 ALOGI("[%s] Falling back to software renderer", 
mComponentName.c_str()); 2047 mNativeWindow.clear(); 2048 mNativeWindowUsageBits = 0; 2049 haveNativeWindow = false; 2050 usingSwRenderer = true; 2051 if (storingMetadataInDecodedBuffers()) { 2052 err = mOMX->storeMetaDataInBuffers( 2053 mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType); 2054 mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case 2055 // TODO: implement adaptive-playback support for bytebuffer mode. 2056 // This is done by SW codecs, but most HW codecs don't support it. 2057 inputFormat->setInt32("adaptive-playback", false); 2058 } 2059 if (err == OK) { 2060 err = mOMX->enableNativeBuffers( 2061 mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 2062 } 2063 if (mFlags & kFlagIsGrallocUsageProtected) { 2064 // fallback is not supported for protected playback 2065 err = PERMISSION_DENIED; 2066 } else if (err == OK) { 2067 err = setupVideoDecoder( 2068 mime, msg, haveNativeWindow, usingSwRenderer, outputFormat); 2069 } 2070 } 2071 } 2072 } 2073 2074 if (usingSwRenderer) { 2075 outputFormat->setInt32("using-sw-renderer", 1); 2076 } 2077 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) { 2078 int32_t numChannels, sampleRate; 2079 if (!msg->findInt32("channel-count", &numChannels) 2080 || !msg->findInt32("sample-rate", &sampleRate)) { 2081 // Since we did not always check for these, leave them optional 2082 // and have the decoder figure it all out. 2083 err = OK; 2084 } else { 2085 err = setupRawAudioFormat( 2086 encoder ? 
kPortIndexInput : kPortIndexOutput, 2087 sampleRate, 2088 numChannels); 2089 } 2090 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) { 2091 int32_t numChannels, sampleRate; 2092 if (!msg->findInt32("channel-count", &numChannels) 2093 || !msg->findInt32("sample-rate", &sampleRate)) { 2094 err = INVALID_OPERATION; 2095 } else { 2096 int32_t isADTS, aacProfile; 2097 int32_t sbrMode; 2098 int32_t maxOutputChannelCount; 2099 int32_t pcmLimiterEnable; 2100 drcParams_t drc; 2101 if (!msg->findInt32("is-adts", &isADTS)) { 2102 isADTS = 0; 2103 } 2104 if (!msg->findInt32("aac-profile", &aacProfile)) { 2105 aacProfile = OMX_AUDIO_AACObjectNull; 2106 } 2107 if (!msg->findInt32("aac-sbr-mode", &sbrMode)) { 2108 sbrMode = -1; 2109 } 2110 2111 if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) { 2112 maxOutputChannelCount = -1; 2113 } 2114 if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) { 2115 // value is unknown 2116 pcmLimiterEnable = -1; 2117 } 2118 if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) { 2119 // value is unknown 2120 drc.encodedTargetLevel = -1; 2121 } 2122 if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) { 2123 // value is unknown 2124 drc.drcCut = -1; 2125 } 2126 if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) { 2127 // value is unknown 2128 drc.drcBoost = -1; 2129 } 2130 if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) { 2131 // value is unknown 2132 drc.heavyCompression = -1; 2133 } 2134 if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) { 2135 // value is unknown 2136 drc.targetRefLevel = -1; 2137 } 2138 2139 err = setupAACCodec( 2140 encoder, numChannels, sampleRate, bitRate, aacProfile, 2141 isADTS != 0, sbrMode, maxOutputChannelCount, drc, 2142 pcmLimiterEnable); 2143 } 2144 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) { 2145 err = setupAMRCodec(encoder, false /* isWAMR */, bitRate); 2146 } else if 
(!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) { 2147 err = setupAMRCodec(encoder, true /* isWAMR */, bitRate); 2148 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW) 2149 || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) { 2150 // These are PCM-like formats with a fixed sample rate but 2151 // a variable number of channels. 2152 2153 int32_t numChannels; 2154 if (!msg->findInt32("channel-count", &numChannels)) { 2155 err = INVALID_OPERATION; 2156 } else { 2157 int32_t sampleRate; 2158 if (!msg->findInt32("sample-rate", &sampleRate)) { 2159 sampleRate = 8000; 2160 } 2161 err = setupG711Codec(encoder, sampleRate, numChannels); 2162 } 2163 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) { 2164 int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1; 2165 if (encoder && 2166 (!msg->findInt32("channel-count", &numChannels) 2167 || !msg->findInt32("sample-rate", &sampleRate))) { 2168 ALOGE("missing channel count or sample rate for FLAC encoder"); 2169 err = INVALID_OPERATION; 2170 } else { 2171 if (encoder) { 2172 if (!msg->findInt32( 2173 "complexity", &compressionLevel) && 2174 !msg->findInt32( 2175 "flac-compression-level", &compressionLevel)) { 2176 compressionLevel = 5; // default FLAC compression level 2177 } else if (compressionLevel < 0) { 2178 ALOGW("compression level %d outside [0..8] range, " 2179 "using 0", 2180 compressionLevel); 2181 compressionLevel = 0; 2182 } else if (compressionLevel > 8) { 2183 ALOGW("compression level %d outside [0..8] range, " 2184 "using 8", 2185 compressionLevel); 2186 compressionLevel = 8; 2187 } 2188 } 2189 err = setupFlacCodec( 2190 encoder, numChannels, sampleRate, compressionLevel); 2191 } 2192 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) { 2193 int32_t numChannels, sampleRate; 2194 if (encoder 2195 || !msg->findInt32("channel-count", &numChannels) 2196 || !msg->findInt32("sample-rate", &sampleRate)) { 2197 err = INVALID_OPERATION; 2198 } else { 2199 err = 
setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding); 2200 } 2201 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) { 2202 int32_t numChannels; 2203 int32_t sampleRate; 2204 if (!msg->findInt32("channel-count", &numChannels) 2205 || !msg->findInt32("sample-rate", &sampleRate)) { 2206 err = INVALID_OPERATION; 2207 } else { 2208 err = setupAC3Codec(encoder, numChannels, sampleRate); 2209 } 2210 } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) { 2211 int32_t numChannels; 2212 int32_t sampleRate; 2213 if (!msg->findInt32("channel-count", &numChannels) 2214 || !msg->findInt32("sample-rate", &sampleRate)) { 2215 err = INVALID_OPERATION; 2216 } else { 2217 err = setupEAC3Codec(encoder, numChannels, sampleRate); 2218 } 2219 } 2220 2221 if (err != OK) { 2222 return err; 2223 } 2224 2225 if (!msg->findInt32("encoder-delay", &mEncoderDelay)) { 2226 mEncoderDelay = 0; 2227 } 2228 2229 if (!msg->findInt32("encoder-padding", &mEncoderPadding)) { 2230 mEncoderPadding = 0; 2231 } 2232 2233 if (msg->findInt32("channel-mask", &mChannelMask)) { 2234 mChannelMaskPresent = true; 2235 } else { 2236 mChannelMaskPresent = false; 2237 } 2238 2239 int32_t maxInputSize; 2240 if (msg->findInt32("max-input-size", &maxInputSize)) { 2241 err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize); 2242 } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) { 2243 err = setMinBufferSize(kPortIndexInput, 8192); // XXX 2244 } 2245 2246 int32_t priority; 2247 if (msg->findInt32("priority", &priority)) { 2248 err = setPriority(priority); 2249 } 2250 2251 int32_t rateInt = -1; 2252 float rateFloat = -1; 2253 if (!msg->findFloat("operating-rate", &rateFloat)) { 2254 msg->findInt32("operating-rate", &rateInt); 2255 rateFloat = (float)rateInt; // 16MHz (FLINTMAX) is OK for upper bound. 
    }
    // A positive operating rate (fps for video, Hz for audio) is forwarded to
    // the component as a hint; setOperatingRate itself treats rejection by the
    // codec as non-fatal.
    if (rateFloat > 0) {
        err = setOperatingRate(rateFloat, video);
    }

    // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
    mBaseOutputFormat = outputFormat;
    // trigger a kWhatOutputFormatChanged msg on first buffer
    mLastOutputFormat.clear();

    // Read back the formats the component actually settled on; only commit
    // them to mInputFormat/mOutputFormat when both port reads succeed.
    err = getPortFormat(kPortIndexInput, inputFormat);
    if (err == OK) {
        err = getPortFormat(kPortIndexOutput, outputFormat);
        if (err == OK) {
            mInputFormat = inputFormat;
            mOutputFormat = outputFormat;
        }
    }

    // create data converters if needed
    if (!video && err == OK) {
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        if (encoder) {
            // Convert from the client's requested PCM encoding to whatever the
            // codec's input port reports (defaults to 16-bit if unreported).
            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
            if (mConverter[kPortIndexInput] != NULL) {
                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        } else {
            // Decoder: convert from the codec's output encoding to the one the
            // client asked for.
            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
            if (mConverter[kPortIndexOutput] != NULL) {
                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        }
    }

    return err;
}

// Sets the codec's scheduling priority via OMX_IndexConfigPriority.
// Best-effort: an unsupported config index is only logged and OK is still
// returned; only a negative priority is rejected outright.
status_t ACodec::setPriority(int32_t priority) {
    if (priority < 0) {
        return BAD_VALUE;
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = (OMX_U32)priority;
    status_t temp = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
            &config, sizeof(config));
    if (temp != OK) {
        ALOGI("codec does not support config priority (err %d)", temp);
    }
    return OK;
}

// Hints the component about the rate at which it will be operated.
// For video the rate is frames-per-second encoded as Q16 fixed point (hence
// the 65535 cap before scaling by 65536); for audio it is the sample rate in
// Hz. Like setPriority, rejection by the codec is logged and OK is returned.
status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
    if (rateFloat < 0) {
        return BAD_VALUE;
    }
    OMX_U32 rate;
    if (isVideo) {
        if (rateFloat >
65535) {
            return BAD_VALUE;
        }
        // Q16 fixed-point frames per second.
        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
    } else {
        if (rateFloat > UINT_MAX) {
            return BAD_VALUE;
        }
        rate = (OMX_U32)(rateFloat);
    }
    OMX_PARAM_U32TYPE config;
    InitOMXParams(&config);
    config.nU32 = rate;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
            &config, sizeof(config));
    if (err != OK) {
        ALOGI("codec does not support config operating rate (err %d)", err);
    }
    return OK;
}

// Queries the encoder's intra-refresh period (in frames). Tries the Android
// config first; otherwise falls back to the standard cyclic-intra-refresh
// parameter and converts its macroblock count into a period using the output
// port's frame size (16x16 macroblocks). Reports 0 when intra refresh is
// disabled or cannot be determined.
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);

    return OK;
}

// Sets the intra-refresh period (in frames). Prefers the Android config; if
// the component rejects it, falls back to the standard cyclic-refresh
// *parameter*, which is only legal to set while still in configure state
// (inConfigure), converting the period into a per-frame macroblock count.
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
    if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
            &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}

// Raises the port's buffer size to at least |size|. The value is read back
// afterwards because components may silently ignore the setParameter.
status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // Already large enough — nothing to do.
    if (def.nBufferSize >= size) {
        return OK;
    }

    def.nBufferSize = size;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    // Re-read to verify the component honored the request.
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    if (def.nBufferSize < size) {
        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
        return FAILED_TRANSACTION;
    }

    return OK;
}

// Walks the audio port's supported-format list (bounded by kMaxIndicesToCheck)
// until |desiredFormat| is found, then selects it via setParameter. Returns
// ERROR_UNSUPPORTED if the bound is reached without a match.
status_t ACodec::selectAudioPortFormat(
        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);

    format.nPortIndex = portIndex;
    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        if (format.eEncoding == desiredFormat) {
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eEncoding), format.eEncoding);
            return ERROR_UNSUPPORTED;
        }
    }

    return mOMX->setParameter(
            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
}

// Configures an AAC codec. Raw PCM is first set up on the "other" port
// (encoder input / decoder output). Encoders then get the AAC coding selected
// plus an OMX_AUDIO_PARAM_AACPROFILETYPE (channel mode, bitrate, profile, SBR
// tool flags) on the output port; decoders get the stream format (ADTS vs
// MP4FF) plus optional Android DRC/presentation parameters on the input port.
// ADTS output from an encoder is not supported (-EINVAL).
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    if (encoder && isADTS) {
        return -EINVAL;
    }

    status_t err = setupRawAudioFormat(
            encoder ?
kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        // Mono for a single channel, stereo otherwise.
        profile.eChannelMode =
            (numChannels == 1)
                ?
OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // sbrMode comes from the "aac-sbr-mode" format key; -1 means
        // unspecified and lets the codec choose.
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        // err is OK here; returned for symmetry with the decoder path.
        return err;
    }

    // Decoder path: configure the AAC profile on the input port.
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ?
OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // DRC / presentation values of -1 mean "unknown / leave to the codec".
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    return res;
}

// AC3 is decode-only here: PCM is configured on the output port, then the
// channel count and sample rate are pushed into the AC3 decoder parameters
// on the input port.
status_t ACodec::setupAC3Codec(
        bool encoder, int32_t numChannels, int32_t sampleRate) {
    status_t err = setupRawAudioFormat(
            encoder ?
kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2660 2661 if (err != OK) { 2662 return err; 2663 } 2664 2665 if (encoder) { 2666 ALOGW("AC3 encoding is not supported."); 2667 return INVALID_OPERATION; 2668 } 2669 2670 OMX_AUDIO_PARAM_ANDROID_AC3TYPE def; 2671 InitOMXParams(&def); 2672 def.nPortIndex = kPortIndexInput; 2673 2674 err = mOMX->getParameter( 2675 mNode, 2676 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2677 &def, 2678 sizeof(def)); 2679 2680 if (err != OK) { 2681 return err; 2682 } 2683 2684 def.nChannels = numChannels; 2685 def.nSampleRate = sampleRate; 2686 2687 return mOMX->setParameter( 2688 mNode, 2689 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 2690 &def, 2691 sizeof(def)); 2692} 2693 2694status_t ACodec::setupEAC3Codec( 2695 bool encoder, int32_t numChannels, int32_t sampleRate) { 2696 status_t err = setupRawAudioFormat( 2697 encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels); 2698 2699 if (err != OK) { 2700 return err; 2701 } 2702 2703 if (encoder) { 2704 ALOGW("EAC3 encoding is not supported."); 2705 return INVALID_OPERATION; 2706 } 2707 2708 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def; 2709 InitOMXParams(&def); 2710 def.nPortIndex = kPortIndexInput; 2711 2712 err = mOMX->getParameter( 2713 mNode, 2714 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2715 &def, 2716 sizeof(def)); 2717 2718 if (err != OK) { 2719 return err; 2720 } 2721 2722 def.nChannels = numChannels; 2723 def.nSampleRate = sampleRate; 2724 2725 return mOMX->setParameter( 2726 mNode, 2727 (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 2728 &def, 2729 sizeof(def)); 2730} 2731 2732static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate( 2733 bool isAMRWB, int32_t bps) { 2734 if (isAMRWB) { 2735 if (bps <= 6600) { 2736 return OMX_AUDIO_AMRBandModeWB0; 2737 } else if (bps <= 8850) { 2738 return OMX_AUDIO_AMRBandModeWB1; 2739 } else if (bps <= 12650) { 2740 return OMX_AUDIO_AMRBandModeWB2; 2741 } else if (bps <= 14250) { 2742 return OMX_AUDIO_AMRBandModeWB3; 
2743 } else if (bps <= 15850) { 2744 return OMX_AUDIO_AMRBandModeWB4; 2745 } else if (bps <= 18250) { 2746 return OMX_AUDIO_AMRBandModeWB5; 2747 } else if (bps <= 19850) { 2748 return OMX_AUDIO_AMRBandModeWB6; 2749 } else if (bps <= 23050) { 2750 return OMX_AUDIO_AMRBandModeWB7; 2751 } 2752 2753 // 23850 bps 2754 return OMX_AUDIO_AMRBandModeWB8; 2755 } else { // AMRNB 2756 if (bps <= 4750) { 2757 return OMX_AUDIO_AMRBandModeNB0; 2758 } else if (bps <= 5150) { 2759 return OMX_AUDIO_AMRBandModeNB1; 2760 } else if (bps <= 5900) { 2761 return OMX_AUDIO_AMRBandModeNB2; 2762 } else if (bps <= 6700) { 2763 return OMX_AUDIO_AMRBandModeNB3; 2764 } else if (bps <= 7400) { 2765 return OMX_AUDIO_AMRBandModeNB4; 2766 } else if (bps <= 7950) { 2767 return OMX_AUDIO_AMRBandModeNB5; 2768 } else if (bps <= 10200) { 2769 return OMX_AUDIO_AMRBandModeNB6; 2770 } 2771 2772 // 12200 bps 2773 return OMX_AUDIO_AMRBandModeNB7; 2774 } 2775} 2776 2777status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) { 2778 OMX_AUDIO_PARAM_AMRTYPE def; 2779 InitOMXParams(&def); 2780 def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput; 2781 2782 status_t err = 2783 mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2784 2785 if (err != OK) { 2786 return err; 2787 } 2788 2789 def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF; 2790 def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate); 2791 2792 err = mOMX->setParameter( 2793 mNode, OMX_IndexParamAudioAmr, &def, sizeof(def)); 2794 2795 if (err != OK) { 2796 return err; 2797 } 2798 2799 return setupRawAudioFormat( 2800 encoder ? kPortIndexInput : kPortIndexOutput, 2801 isWAMR ? 
16000 : 8000 /* sampleRate */, 2802 1 /* numChannels */); 2803} 2804 2805status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) { 2806 if (encoder) { 2807 return INVALID_OPERATION; 2808 } 2809 2810 return setupRawAudioFormat( 2811 kPortIndexInput, sampleRate, numChannels); 2812} 2813 2814status_t ACodec::setupFlacCodec( 2815 bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) { 2816 2817 if (encoder) { 2818 OMX_AUDIO_PARAM_FLACTYPE def; 2819 InitOMXParams(&def); 2820 def.nPortIndex = kPortIndexOutput; 2821 2822 // configure compression level 2823 status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2824 if (err != OK) { 2825 ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err); 2826 return err; 2827 } 2828 def.nCompressionLevel = compressionLevel; 2829 err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def)); 2830 if (err != OK) { 2831 ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err); 2832 return err; 2833 } 2834 } 2835 2836 return setupRawAudioFormat( 2837 encoder ? 
kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);
}

// Configures a port for raw PCM: forces OMX_AUDIO_CodingPCM on the port
// definition, then fills in OMX_AUDIO_PARAM_PCMMODETYPE — channel count,
// sample rate, interleaving, and the numeric format derived from |encoding|
// (8-bit unsigned, 32-bit float, or 16-bit signed; anything else is
// BAD_VALUE). If the codec rejects a non-16-bit encoding, retries as 16-bit.
status_t ACodec::setupRawAudioFormat(
        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
    InitOMXParams(&pcmParams);
    pcmParams.nPortIndex = portIndex;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));

    if (err != OK) {
        return err;
    }

    pcmParams.nChannels = numChannels;
    switch (encoding) {
        case kAudioEncodingPcm8bit:
            pcmParams.eNumData = OMX_NumericalDataUnsigned;
            pcmParams.nBitPerSample = 8;
            break;
        case kAudioEncodingPcmFloat:
            pcmParams.eNumData = OMX_NumericalDataFloat;
            pcmParams.nBitPerSample = 32;
            break;
        case kAudioEncodingPcm16bit:
            pcmParams.eNumData = OMX_NumericalDataSigned;
            pcmParams.nBitPerSample = 16;
            break;
        default:
            return BAD_VALUE;
    }
    pcmParams.bInterleaved = OMX_TRUE;
    pcmParams.nSamplingRate = sampleRate;
    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;

    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
        // NOTE(review): returns OMX_ErrorNone (numerically 0 == OK) from a
        // status_t function and skips the PCM setParameter below entirely
        // when no channel mapping exists — presumably intentional
        // best-effort behavior; confirm before changing the constant.
        return OMX_ErrorNone;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    // if we could not set up raw format to non-16-bit, try with 16-bit
    // NOTE: we will also verify this via readback, in case codec ignores these fields
    if (err !=
OK && encoding != kAudioEncodingPcm16bit) {
        pcmParams.eNumData = OMX_NumericalDataSigned;
        pcmParams.nBitPerSample = 16;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
    }
    return err;
}

// Puts the output port into tunneled (sideband) mode bound to the given audio
// HW sync ID, then attaches the sideband handle the component returned to the
// native window.
status_t ACodec::configureTunneledVideoPlayback(
        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
    native_handle_t* sidebandHandle;

    status_t err = mOMX->configureVideoTunnelMode(
            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
    if (err != OK) {
        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
        return err;
    }

    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
    if (err != OK) {
        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
                sidebandHandle, err);
        return err;
    }

    return OK;
}

// Enumerates the video port's supported formats looking for a
// compression/color pair matching the request; a flexible color request is
// substituted by the codec's concrete equivalent before matching. Contains a
// workaround for OMX.TI.Video.encoder, which reports inconsistent pairs.
// Unlike selectAudioPortFormat, reaching kMaxIndicesToCheck only logs a
// warning here; the loop then exits with found == false and UNKNOWN_ERROR
// is returned.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat =
format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
                && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}

// Set optimal output format. OMX component lists output formats in the order
// of preference, but this got more complicated since the introduction of flexible
// YUV formats. We support a legacy behavior for applications that do not use
// surface output, do not specify an output format, but expect a "usable" standard
// OMX format. SW readable and standard formats must be flex-YUV.
3014// 3015// Suggested preference order: 3016// - optimal format for texture rendering (mediaplayer behavior) 3017// - optimal SW readable & texture renderable format (flex-YUV support) 3018// - optimal SW readable non-renderable format (flex-YUV bytebuffer support) 3019// - legacy "usable" standard formats 3020// 3021// For legacy support, we prefer a standard format, but will settle for a SW readable 3022// flex-YUV format. 3023status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) { 3024 OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat; 3025 InitOMXParams(&format); 3026 format.nPortIndex = kPortIndexOutput; 3027 3028 InitOMXParams(&legacyFormat); 3029 // this field will change when we find a suitable legacy format 3030 legacyFormat.eColorFormat = OMX_COLOR_FormatUnused; 3031 3032 for (OMX_U32 index = 0; ; ++index) { 3033 format.nIndex = index; 3034 status_t err = mOMX->getParameter( 3035 mNode, OMX_IndexParamVideoPortFormat, 3036 &format, sizeof(format)); 3037 if (err != OK) { 3038 // no more formats, pick legacy format if found 3039 if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) { 3040 memcpy(&format, &legacyFormat, sizeof(format)); 3041 break; 3042 } 3043 return err; 3044 } 3045 if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) { 3046 return OMX_ErrorBadParameter; 3047 } 3048 if (!getLegacyFlexibleFormat) { 3049 break; 3050 } 3051 // standard formats that were exposed to users before 3052 if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar 3053 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar 3054 || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar 3055 || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar 3056 || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) { 3057 break; 3058 } 3059 // find best legacy non-standard format 3060 OMX_U32 flexibleEquivalent; 3061 if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused 3062 && isFlexibleColorFormat( 3063 mOMX, mNode, 
format.eColorFormat, false /* usingNativeBuffers */, 3064 &flexibleEquivalent) 3065 && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) { 3066 memcpy(&legacyFormat, &format, sizeof(format)); 3067 } 3068 } 3069 return mOMX->setParameter( 3070 mNode, OMX_IndexParamVideoPortFormat, 3071 &format, sizeof(format)); 3072} 3073 3074static const struct VideoCodingMapEntry { 3075 const char *mMime; 3076 OMX_VIDEO_CODINGTYPE mVideoCodingType; 3077} kVideoCodingMapEntry[] = { 3078 { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC }, 3079 { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC }, 3080 { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 }, 3081 { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 }, 3082 { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 }, 3083 { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 }, 3084 { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 }, 3085 { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision }, 3086}; 3087 3088static status_t GetVideoCodingTypeFromMime( 3089 const char *mime, OMX_VIDEO_CODINGTYPE *codingType) { 3090 for (size_t i = 0; 3091 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3092 ++i) { 3093 if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) { 3094 *codingType = kVideoCodingMapEntry[i].mVideoCodingType; 3095 return OK; 3096 } 3097 } 3098 3099 *codingType = OMX_VIDEO_CodingUnused; 3100 3101 return ERROR_UNSUPPORTED; 3102} 3103 3104static status_t GetMimeTypeForVideoCoding( 3105 OMX_VIDEO_CODINGTYPE codingType, AString *mime) { 3106 for (size_t i = 0; 3107 i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]); 3108 ++i) { 3109 if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) { 3110 *mime = kVideoCodingMapEntry[i].mMime; 3111 return OK; 3112 } 3113 } 3114 3115 mime->clear(); 3116 3117 return ERROR_UNSUPPORTED; 3118} 3119 3120status_t ACodec::setupVideoDecoder( 3121 const char *mime, const sp<AMessage> &msg, bool haveNativeWindow, 3122 bool usingSwRenderer, 
sp<AMessage> &outputFormat) { 3123 int32_t width, height; 3124 if (!msg->findInt32("width", &width) 3125 || !msg->findInt32("height", &height)) { 3126 return INVALID_OPERATION; 3127 } 3128 3129 OMX_VIDEO_CODINGTYPE compressionFormat; 3130 status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3131 3132 if (err != OK) { 3133 return err; 3134 } 3135 3136 err = setVideoPortFormatType( 3137 kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused); 3138 3139 if (err != OK) { 3140 return err; 3141 } 3142 3143 int32_t tmp; 3144 if (msg->findInt32("color-format", &tmp)) { 3145 OMX_COLOR_FORMATTYPE colorFormat = 3146 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3147 err = setVideoPortFormatType( 3148 kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow); 3149 if (err != OK) { 3150 ALOGW("[%s] does not support color format %d", 3151 mComponentName.c_str(), colorFormat); 3152 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3153 } 3154 } else { 3155 err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */); 3156 } 3157 3158 if (err != OK) { 3159 return err; 3160 } 3161 3162 int32_t frameRateInt; 3163 float frameRateFloat; 3164 if (!msg->findFloat("frame-rate", &frameRateFloat)) { 3165 if (!msg->findInt32("frame-rate", &frameRateInt)) { 3166 frameRateInt = -1; 3167 } 3168 frameRateFloat = (float)frameRateInt; 3169 } 3170 3171 err = setVideoFormatOnPort( 3172 kPortIndexInput, width, height, compressionFormat, frameRateFloat); 3173 3174 if (err != OK) { 3175 return err; 3176 } 3177 3178 err = setVideoFormatOnPort( 3179 kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused); 3180 3181 if (err != OK) { 3182 return err; 3183 } 3184 3185 err = setColorAspectsForVideoDecoder( 3186 width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat); 3187 if (err == ERROR_UNSUPPORTED) { // support is optional 3188 err = OK; 3189 } 3190 3191 if (err != OK) { 3192 return err; 3193 } 3194 3195 err = 
setHDRStaticInfoForVideoDecoder(msg, outputFormat); 3196 if (err == ERROR_UNSUPPORTED) { // support is optional 3197 err = OK; 3198 } 3199 return err; 3200} 3201 3202status_t ACodec::initDescribeColorAspectsIndex() { 3203 status_t err = mOMX->getExtensionIndex( 3204 mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex); 3205 if (err != OK) { 3206 mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0; 3207 } 3208 return err; 3209} 3210 3211status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams ¶ms, bool verify) { 3212 status_t err = ERROR_UNSUPPORTED; 3213 if (mDescribeColorAspectsIndex) { 3214 err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3215 } 3216 ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3217 mComponentName.c_str(), 3218 params.sAspects.mRange, asString(params.sAspects.mRange), 3219 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3220 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3221 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3222 err, asString(err)); 3223 3224 if (verify && err == OK) { 3225 err = getCodecColorAspects(params); 3226 } 3227 3228 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex, 3229 "[%s] setting color aspects failed even though codec advertises support", 3230 mComponentName.c_str()); 3231 return err; 3232} 3233 3234status_t ACodec::setColorAspectsForVideoDecoder( 3235 int32_t width, int32_t height, bool usingNativeWindow, 3236 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3237 DescribeColorAspectsParams params; 3238 InitOMXParams(¶ms); 3239 params.nPortIndex = kPortIndexOutput; 3240 3241 getColorAspectsFromFormat(configFormat, params.sAspects); 3242 if (usingNativeWindow) { 3243 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3244 // The default aspects will be set back to the output format during the 3245 // 
getFormat phase of configure(). Set non-Unspecified values back into the 3246 // format, in case component does not support this enumeration. 3247 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3248 } 3249 3250 (void)initDescribeColorAspectsIndex(); 3251 3252 // communicate color aspects to codec 3253 return setCodecColorAspects(params); 3254} 3255 3256status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams ¶ms) { 3257 status_t err = ERROR_UNSUPPORTED; 3258 if (mDescribeColorAspectsIndex) { 3259 err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, ¶ms, sizeof(params)); 3260 } 3261 ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)", 3262 mComponentName.c_str(), 3263 params.sAspects.mRange, asString(params.sAspects.mRange), 3264 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3265 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3266 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3267 err, asString(err)); 3268 if (params.bRequestingDataSpace) { 3269 ALOGV("for dataspace %#x", params.nDataSpace); 3270 } 3271 if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex 3272 && !params.bRequestingDataSpace && !params.bDataSpaceChanged) { 3273 ALOGW("[%s] getting color aspects failed even though codec advertises support", 3274 mComponentName.c_str()); 3275 } 3276 return err; 3277} 3278 3279status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) { 3280 DescribeColorAspectsParams params; 3281 InitOMXParams(¶ms); 3282 params.nPortIndex = kPortIndexInput; 3283 status_t err = getCodecColorAspects(params); 3284 if (err == OK) { 3285 // we only set encoder input aspects if codec supports them 3286 setColorAspectsIntoFormat(params.sAspects, format, true /* force */); 3287 } 3288 return err; 3289} 3290 3291status_t ACodec::getDataSpace( 3292 DescribeColorAspectsParams ¶ms, android_dataspace *dataSpace /* nonnull */, 3293 bool tryCodec) { 3294 status_t 
err = OK; 3295 if (tryCodec) { 3296 // request dataspace guidance from codec. 3297 params.bRequestingDataSpace = OMX_TRUE; 3298 err = getCodecColorAspects(params); 3299 params.bRequestingDataSpace = OMX_FALSE; 3300 if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) { 3301 *dataSpace = (android_dataspace)params.nDataSpace; 3302 return err; 3303 } else if (err == ERROR_UNSUPPORTED) { 3304 // ignore not-implemented error for dataspace requests 3305 err = OK; 3306 } 3307 } 3308 3309 // this returns legacy versions if available 3310 *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */); 3311 ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 3312 "and dataspace %#x", 3313 mComponentName.c_str(), 3314 params.sAspects.mRange, asString(params.sAspects.mRange), 3315 params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries), 3316 params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs), 3317 params.sAspects.mTransfer, asString(params.sAspects.mTransfer), 3318 *dataSpace); 3319 return err; 3320} 3321 3322 3323status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder( 3324 int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, 3325 android_dataspace *dataSpace) { 3326 DescribeColorAspectsParams params; 3327 InitOMXParams(¶ms); 3328 params.nPortIndex = kPortIndexOutput; 3329 3330 // reset default format and get resulting format 3331 getColorAspectsFromFormat(configFormat, params.sAspects); 3332 if (dataSpace != NULL) { 3333 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3334 } 3335 status_t err = setCodecColorAspects(params, true /* readBack */); 3336 3337 // we always set specified aspects for decoders 3338 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3339 3340 if (dataSpace != NULL) { 3341 status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */); 3342 if (err == OK) { 3343 err = res; 3344 } 3345 } 3346 3347 
return err; 3348} 3349 3350// initial video encoder setup for bytebuffer mode 3351status_t ACodec::setColorAspectsForVideoEncoder( 3352 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3353 // copy config to output format as this is not exposed via getFormat 3354 copyColorConfig(configFormat, outputFormat); 3355 3356 DescribeColorAspectsParams params; 3357 InitOMXParams(¶ms); 3358 params.nPortIndex = kPortIndexInput; 3359 getColorAspectsFromFormat(configFormat, params.sAspects); 3360 3361 (void)initDescribeColorAspectsIndex(); 3362 3363 int32_t usingRecorder; 3364 if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) { 3365 android_dataspace dataSpace = HAL_DATASPACE_BT709; 3366 int32_t width, height; 3367 if (configFormat->findInt32("width", &width) 3368 && configFormat->findInt32("height", &height)) { 3369 setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height); 3370 status_t err = getDataSpace( 3371 params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */); 3372 if (err != OK) { 3373 return err; 3374 } 3375 setColorAspectsIntoFormat(params.sAspects, outputFormat); 3376 } 3377 inputFormat->setInt32("android._dataspace", (int32_t)dataSpace); 3378 } 3379 3380 // communicate color aspects to codec, but do not allow change of the platform aspects 3381 ColorAspects origAspects = params.sAspects; 3382 for (int triesLeft = 2; --triesLeft >= 0; ) { 3383 status_t err = setCodecColorAspects(params, true /* readBack */); 3384 if (err != OK 3385 || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem( 3386 params.sAspects, origAspects, true /* usePlatformAspects */)) { 3387 return err; 3388 } 3389 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3390 mComponentName.c_str()); 3391 } 3392 return OK; 3393} 3394 3395// subsequent initial video encoder setup for surface mode 3396status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace( 
3397 android_dataspace *dataSpace /* nonnull */) { 3398 DescribeColorAspectsParams params; 3399 InitOMXParams(¶ms); 3400 params.nPortIndex = kPortIndexInput; 3401 ColorAspects &aspects = params.sAspects; 3402 3403 // reset default format and store resulting format into both input and output formats 3404 getColorAspectsFromFormat(mConfigFormat, aspects); 3405 int32_t width, height; 3406 if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) { 3407 setDefaultCodecColorAspectsIfNeeded(aspects, width, height); 3408 } 3409 setColorAspectsIntoFormat(aspects, mInputFormat); 3410 setColorAspectsIntoFormat(aspects, mOutputFormat); 3411 3412 // communicate color aspects to codec, but do not allow any change 3413 ColorAspects origAspects = aspects; 3414 status_t err = OK; 3415 for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) { 3416 status_t err = setCodecColorAspects(params, true /* readBack */); 3417 if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) { 3418 break; 3419 } 3420 ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.", 3421 mComponentName.c_str()); 3422 } 3423 3424 *dataSpace = HAL_DATASPACE_BT709; 3425 aspects = origAspects; // restore desired color aspects 3426 status_t res = getDataSpace( 3427 params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */); 3428 if (err == OK) { 3429 err = res; 3430 } 3431 mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace); 3432 mInputFormat->setBuffer( 3433 "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects))); 3434 3435 // update input format with codec supported color aspects (basically set unsupported 3436 // aspects to Unspecified) 3437 if (err == OK) { 3438 (void)getInputColorAspectsForVideoEncoder(mInputFormat); 3439 } 3440 3441 ALOGV("set default color aspects, updated input format to %s, output format to %s", 3442 
mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str()); 3443 3444 return err; 3445} 3446 3447status_t ACodec::getHDRStaticInfoForVideoDecoder(sp<AMessage> &format) { 3448 DescribeHDRStaticInfoParams params; 3449 InitOMXParams(¶ms); 3450 params.nPortIndex = kPortIndexOutput; 3451 3452 status_t err = getHDRStaticInfo(params); 3453 if (err == OK) { 3454 // we only set decodec output HDRStaticInfo if codec supports them 3455 setHDRStaticInfoIntoFormat(params.sInfo, format); 3456 } 3457 return err; 3458} 3459 3460status_t ACodec::initDescribeHDRStaticInfoIndex() { 3461 status_t err = mOMX->getExtensionIndex( 3462 mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex); 3463 if (err != OK) { 3464 mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0; 3465 } 3466 return err; 3467} 3468 3469status_t ACodec::setHDRStaticInfoForVideoDecoder( 3470 const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) { 3471 DescribeHDRStaticInfoParams params; 3472 InitOMXParams(¶ms); 3473 params.nPortIndex = kPortIndexOutput; 3474 3475 HDRStaticInfo *info = ¶ms.sInfo; 3476 if (getHDRStaticInfoFromFormat(configFormat, info)) { 3477 setHDRStaticInfoIntoFormat(params.sInfo, outputFormat); 3478 } 3479 3480 (void)initDescribeHDRStaticInfoIndex(); 3481 3482 // communicate HDR static Info to codec 3483 return setHDRStaticInfo(params); 3484} 3485 3486status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams ¶ms) { 3487 status_t err = ERROR_UNSUPPORTED; 3488 if (mDescribeHDRStaticInfoIndex) { 3489 err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3490 } 3491 3492 const HDRStaticInfo *info = ¶ms.sInfo; 3493 ALOGV("[%s] setting HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, " 3494 "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)", 3495 mComponentName.c_str(), 3496 info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y, 3497 info->sType1.mB.x, info->sType1.mB.y, 
info->sType1.mW.x, info->sType1.mW.y, 3498 info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance, 3499 info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel); 3500 3501 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3502 "[%s] setting HDRStaticInfo failed even though codec advertises support", 3503 mComponentName.c_str()); 3504 return err; 3505} 3506 3507status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams ¶ms) { 3508 status_t err = ERROR_UNSUPPORTED; 3509 if (mDescribeHDRStaticInfoIndex) { 3510 err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, ¶ms, sizeof(params)); 3511 } 3512 3513 ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex, 3514 "[%s] getting HDRStaticInfo failed even though codec advertises support", 3515 mComponentName.c_str()); 3516 return err; 3517} 3518 3519status_t ACodec::setupVideoEncoder( 3520 const char *mime, const sp<AMessage> &msg, 3521 sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) { 3522 int32_t tmp; 3523 if (!msg->findInt32("color-format", &tmp)) { 3524 return INVALID_OPERATION; 3525 } 3526 3527 OMX_COLOR_FORMATTYPE colorFormat = 3528 static_cast<OMX_COLOR_FORMATTYPE>(tmp); 3529 3530 status_t err = setVideoPortFormatType( 3531 kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat); 3532 3533 if (err != OK) { 3534 ALOGE("[%s] does not support color format %d", 3535 mComponentName.c_str(), colorFormat); 3536 3537 return err; 3538 } 3539 3540 /* Input port configuration */ 3541 3542 OMX_PARAM_PORTDEFINITIONTYPE def; 3543 InitOMXParams(&def); 3544 3545 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 3546 3547 def.nPortIndex = kPortIndexInput; 3548 3549 err = mOMX->getParameter( 3550 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3551 3552 if (err != OK) { 3553 return err; 3554 } 3555 3556 int32_t width, height, bitrate; 3557 if (!msg->findInt32("width", &width) 3558 || !msg->findInt32("height", &height) 3559 || 
!msg->findInt32("bitrate", &bitrate)) { 3560 return INVALID_OPERATION; 3561 } 3562 3563 video_def->nFrameWidth = width; 3564 video_def->nFrameHeight = height; 3565 3566 int32_t stride; 3567 if (!msg->findInt32("stride", &stride)) { 3568 stride = width; 3569 } 3570 3571 video_def->nStride = stride; 3572 3573 int32_t sliceHeight; 3574 if (!msg->findInt32("slice-height", &sliceHeight)) { 3575 sliceHeight = height; 3576 } 3577 3578 video_def->nSliceHeight = sliceHeight; 3579 3580 def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2; 3581 3582 float frameRate; 3583 if (!msg->findFloat("frame-rate", &frameRate)) { 3584 int32_t tmp; 3585 if (!msg->findInt32("frame-rate", &tmp)) { 3586 return INVALID_OPERATION; 3587 } 3588 frameRate = (float)tmp; 3589 mTimePerFrameUs = (int64_t) (1000000.0f / frameRate); 3590 } 3591 3592 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 3593 video_def->eCompressionFormat = OMX_VIDEO_CodingUnused; 3594 // this is redundant as it was already set up in setVideoPortFormatType 3595 // FIXME for now skip this only for flexible YUV formats 3596 if (colorFormat != OMX_COLOR_FormatYUV420Flexible) { 3597 video_def->eColorFormat = colorFormat; 3598 } 3599 3600 err = mOMX->setParameter( 3601 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3602 3603 if (err != OK) { 3604 ALOGE("[%s] failed to set input port definition parameters.", 3605 mComponentName.c_str()); 3606 3607 return err; 3608 } 3609 3610 /* Output port configuration */ 3611 3612 OMX_VIDEO_CODINGTYPE compressionFormat; 3613 err = GetVideoCodingTypeFromMime(mime, &compressionFormat); 3614 3615 if (err != OK) { 3616 return err; 3617 } 3618 3619 err = setVideoPortFormatType( 3620 kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused); 3621 3622 if (err != OK) { 3623 ALOGE("[%s] does not support compression format %d", 3624 mComponentName.c_str(), compressionFormat); 3625 3626 return err; 3627 } 3628 3629 def.nPortIndex = kPortIndexOutput; 3630 3631 err 
= mOMX->getParameter( 3632 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3633 3634 if (err != OK) { 3635 return err; 3636 } 3637 3638 video_def->nFrameWidth = width; 3639 video_def->nFrameHeight = height; 3640 video_def->xFramerate = 0; 3641 video_def->nBitrate = bitrate; 3642 video_def->eCompressionFormat = compressionFormat; 3643 video_def->eColorFormat = OMX_COLOR_FormatUnused; 3644 3645 err = mOMX->setParameter( 3646 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 3647 3648 if (err != OK) { 3649 ALOGE("[%s] failed to set output port definition parameters.", 3650 mComponentName.c_str()); 3651 3652 return err; 3653 } 3654 3655 int32_t intraRefreshPeriod = 0; 3656 if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod) 3657 && intraRefreshPeriod >= 0) { 3658 err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true); 3659 if (err != OK) { 3660 ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional", 3661 mComponentName.c_str()); 3662 err = OK; 3663 } 3664 } 3665 3666 switch (compressionFormat) { 3667 case OMX_VIDEO_CodingMPEG4: 3668 err = setupMPEG4EncoderParameters(msg); 3669 break; 3670 3671 case OMX_VIDEO_CodingH263: 3672 err = setupH263EncoderParameters(msg); 3673 break; 3674 3675 case OMX_VIDEO_CodingAVC: 3676 err = setupAVCEncoderParameters(msg); 3677 break; 3678 3679 case OMX_VIDEO_CodingHEVC: 3680 err = setupHEVCEncoderParameters(msg); 3681 break; 3682 3683 case OMX_VIDEO_CodingVP8: 3684 case OMX_VIDEO_CodingVP9: 3685 err = setupVPXEncoderParameters(msg); 3686 break; 3687 3688 default: 3689 break; 3690 } 3691 3692 // Set up color aspects on input, but propagate them to the output format, as they will 3693 // not be read back from encoder. 3694 err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat); 3695 if (err == ERROR_UNSUPPORTED) { 3696 ALOGI("[%s] cannot encode color aspects. 
Ignoring.", mComponentName.c_str()); 3697 err = OK; 3698 } 3699 3700 if (err == OK) { 3701 ALOGI("setupVideoEncoder succeeded"); 3702 } 3703 3704 return err; 3705} 3706 3707status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) { 3708 OMX_VIDEO_PARAM_INTRAREFRESHTYPE params; 3709 InitOMXParams(¶ms); 3710 params.nPortIndex = kPortIndexOutput; 3711 3712 params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode); 3713 3714 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic || 3715 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3716 int32_t mbs; 3717 if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) { 3718 return INVALID_OPERATION; 3719 } 3720 params.nCirMBs = mbs; 3721 } 3722 3723 if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive || 3724 params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) { 3725 int32_t mbs; 3726 if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) { 3727 return INVALID_OPERATION; 3728 } 3729 params.nAirMBs = mbs; 3730 3731 int32_t ref; 3732 if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) { 3733 return INVALID_OPERATION; 3734 } 3735 params.nAirRef = ref; 3736 } 3737 3738 status_t err = mOMX->setParameter( 3739 mNode, OMX_IndexParamVideoIntraRefresh, 3740 ¶ms, sizeof(params)); 3741 return err; 3742} 3743 3744static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) { 3745 if (iFramesInterval < 0) { 3746 return 0xFFFFFFFF; 3747 } else if (iFramesInterval == 0) { 3748 return 0; 3749 } 3750 OMX_U32 ret = frameRate * iFramesInterval; 3751 return ret; 3752} 3753 3754static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) { 3755 int32_t tmp; 3756 if (!msg->findInt32("bitrate-mode", &tmp)) { 3757 return OMX_Video_ControlRateVariable; 3758 } 3759 3760 return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp); 3761} 3762 3763status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) { 3764 int32_t bitrate, iFrameInterval; 3765 if (!msg->findInt32("bitrate", 
&bitrate) 3766 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3767 return INVALID_OPERATION; 3768 } 3769 3770 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3771 3772 float frameRate; 3773 if (!msg->findFloat("frame-rate", &frameRate)) { 3774 int32_t tmp; 3775 if (!msg->findInt32("frame-rate", &tmp)) { 3776 return INVALID_OPERATION; 3777 } 3778 frameRate = (float)tmp; 3779 } 3780 3781 OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type; 3782 InitOMXParams(&mpeg4type); 3783 mpeg4type.nPortIndex = kPortIndexOutput; 3784 3785 status_t err = mOMX->getParameter( 3786 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3787 3788 if (err != OK) { 3789 return err; 3790 } 3791 3792 mpeg4type.nSliceHeaderSpacing = 0; 3793 mpeg4type.bSVH = OMX_FALSE; 3794 mpeg4type.bGov = OMX_FALSE; 3795 3796 mpeg4type.nAllowedPictureTypes = 3797 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3798 3799 mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3800 if (mpeg4type.nPFrames == 0) { 3801 mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3802 } 3803 mpeg4type.nBFrames = 0; 3804 mpeg4type.nIDCVLCThreshold = 0; 3805 mpeg4type.bACPred = OMX_TRUE; 3806 mpeg4type.nMaxPacketSize = 256; 3807 mpeg4type.nTimeIncRes = 1000; 3808 mpeg4type.nHeaderExtension = 0; 3809 mpeg4type.bReversibleVLC = OMX_FALSE; 3810 3811 int32_t profile; 3812 if (msg->findInt32("profile", &profile)) { 3813 int32_t level; 3814 if (!msg->findInt32("level", &level)) { 3815 return INVALID_OPERATION; 3816 } 3817 3818 err = verifySupportForProfileAndLevel(profile, level); 3819 3820 if (err != OK) { 3821 return err; 3822 } 3823 3824 mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile); 3825 mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level); 3826 } 3827 3828 err = mOMX->setParameter( 3829 mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type)); 3830 3831 if (err != OK) { 3832 return err; 3833 } 3834 3835 err = configureBitrate(bitrate, 
bitrateMode); 3836 3837 if (err != OK) { 3838 return err; 3839 } 3840 3841 return setupErrorCorrectionParameters(); 3842} 3843 3844status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) { 3845 int32_t bitrate, iFrameInterval; 3846 if (!msg->findInt32("bitrate", &bitrate) 3847 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3848 return INVALID_OPERATION; 3849 } 3850 3851 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3852 3853 float frameRate; 3854 if (!msg->findFloat("frame-rate", &frameRate)) { 3855 int32_t tmp; 3856 if (!msg->findInt32("frame-rate", &tmp)) { 3857 return INVALID_OPERATION; 3858 } 3859 frameRate = (float)tmp; 3860 } 3861 3862 OMX_VIDEO_PARAM_H263TYPE h263type; 3863 InitOMXParams(&h263type); 3864 h263type.nPortIndex = kPortIndexOutput; 3865 3866 status_t err = mOMX->getParameter( 3867 mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type)); 3868 3869 if (err != OK) { 3870 return err; 3871 } 3872 3873 h263type.nAllowedPictureTypes = 3874 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 3875 3876 h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 3877 if (h263type.nPFrames == 0) { 3878 h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 3879 } 3880 h263type.nBFrames = 0; 3881 3882 int32_t profile; 3883 if (msg->findInt32("profile", &profile)) { 3884 int32_t level; 3885 if (!msg->findInt32("level", &level)) { 3886 return INVALID_OPERATION; 3887 } 3888 3889 err = verifySupportForProfileAndLevel(profile, level); 3890 3891 if (err != OK) { 3892 return err; 3893 } 3894 3895 h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile); 3896 h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level); 3897 } 3898 3899 h263type.bPLUSPTYPEAllowed = OMX_FALSE; 3900 h263type.bForceRoundingTypeToZero = OMX_FALSE; 3901 h263type.nPictureHeaderRepetition = 0; 3902 h263type.nGOBHeaderInterval = 0; 3903 3904 err = mOMX->setParameter( 3905 mNode, OMX_IndexParamVideoH263, &h263type, 
sizeof(h263type)); 3906 3907 if (err != OK) { 3908 return err; 3909 } 3910 3911 err = configureBitrate(bitrate, bitrateMode); 3912 3913 if (err != OK) { 3914 return err; 3915 } 3916 3917 return setupErrorCorrectionParameters(); 3918} 3919 3920// static 3921int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor( 3922 int width, int height, int rate, int bitrate, 3923 OMX_VIDEO_AVCPROFILETYPE profile) { 3924 // convert bitrate to main/baseline profile kbps equivalent 3925 switch (profile) { 3926 case OMX_VIDEO_AVCProfileHigh10: 3927 bitrate = divUp(bitrate, 3000); break; 3928 case OMX_VIDEO_AVCProfileHigh: 3929 bitrate = divUp(bitrate, 1250); break; 3930 default: 3931 bitrate = divUp(bitrate, 1000); break; 3932 } 3933 3934 // convert size and rate to MBs 3935 width = divUp(width, 16); 3936 height = divUp(height, 16); 3937 int mbs = width * height; 3938 rate *= mbs; 3939 int maxDimension = max(width, height); 3940 3941 static const int limits[][5] = { 3942 /* MBps MB dim bitrate level */ 3943 { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 }, 3944 { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b }, 3945 { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 }, 3946 { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 }, 3947 { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 }, 3948 { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 }, 3949 { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 }, 3950 { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 }, 3951 { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 }, 3952 { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 }, 3953 { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 }, 3954 { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 }, 3955 { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 }, 3956 { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 }, 3957 { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 }, 3958 { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 }, 3959 { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 }, 3960 }; 3961 3962 for (size_t i = 0; i < ARRAY_SIZE(limits); i++) { 
3963 const int (&limit)[5] = limits[i]; 3964 if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2] 3965 && bitrate <= limit[3]) { 3966 return limit[4]; 3967 } 3968 } 3969 return 0; 3970} 3971 3972status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) { 3973 int32_t bitrate, iFrameInterval; 3974 if (!msg->findInt32("bitrate", &bitrate) 3975 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 3976 return INVALID_OPERATION; 3977 } 3978 3979 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 3980 3981 float frameRate; 3982 if (!msg->findFloat("frame-rate", &frameRate)) { 3983 int32_t tmp; 3984 if (!msg->findInt32("frame-rate", &tmp)) { 3985 return INVALID_OPERATION; 3986 } 3987 frameRate = (float)tmp; 3988 } 3989 3990 status_t err = OK; 3991 int32_t intraRefreshMode = 0; 3992 if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) { 3993 err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode); 3994 if (err != OK) { 3995 ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x", 3996 err, intraRefreshMode); 3997 return err; 3998 } 3999 } 4000 4001 OMX_VIDEO_PARAM_AVCTYPE h264type; 4002 InitOMXParams(&h264type); 4003 h264type.nPortIndex = kPortIndexOutput; 4004 4005 err = mOMX->getParameter( 4006 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4007 4008 if (err != OK) { 4009 return err; 4010 } 4011 4012 h264type.nAllowedPictureTypes = 4013 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP; 4014 4015 int32_t profile; 4016 if (msg->findInt32("profile", &profile)) { 4017 int32_t level; 4018 if (!msg->findInt32("level", &level)) { 4019 return INVALID_OPERATION; 4020 } 4021 4022 err = verifySupportForProfileAndLevel(profile, level); 4023 4024 if (err != OK) { 4025 return err; 4026 } 4027 4028 h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile); 4029 h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level); 4030 } else { 4031 // Use baseline profile for AVC recording if profile is not 
specified. 4032 h264type.eProfile = OMX_VIDEO_AVCProfileBaseline; 4033 } 4034 4035 ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]", 4036 asString(h264type.eProfile), asString(h264type.eLevel)); 4037 4038 if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) { 4039 h264type.nSliceHeaderSpacing = 0; 4040 h264type.bUseHadamard = OMX_TRUE; 4041 h264type.nRefFrames = 1; 4042 h264type.nBFrames = 0; 4043 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4044 if (h264type.nPFrames == 0) { 4045 h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI; 4046 } 4047 h264type.nRefIdx10ActiveMinus1 = 0; 4048 h264type.nRefIdx11ActiveMinus1 = 0; 4049 h264type.bEntropyCodingCABAC = OMX_FALSE; 4050 h264type.bWeightedPPrediction = OMX_FALSE; 4051 h264type.bconstIpred = OMX_FALSE; 4052 h264type.bDirect8x8Inference = OMX_FALSE; 4053 h264type.bDirectSpatialTemporal = OMX_FALSE; 4054 h264type.nCabacInitIdc = 0; 4055 } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain || 4056 h264type.eProfile == OMX_VIDEO_AVCProfileHigh) { 4057 h264type.nSliceHeaderSpacing = 0; 4058 h264type.bUseHadamard = OMX_TRUE; 4059 h264type.nRefFrames = 2; 4060 h264type.nBFrames = 1; 4061 h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate); 4062 h264type.nAllowedPictureTypes = 4063 OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB; 4064 h264type.nRefIdx10ActiveMinus1 = 0; 4065 h264type.nRefIdx11ActiveMinus1 = 0; 4066 h264type.bEntropyCodingCABAC = OMX_TRUE; 4067 h264type.bWeightedPPrediction = OMX_TRUE; 4068 h264type.bconstIpred = OMX_TRUE; 4069 h264type.bDirect8x8Inference = OMX_TRUE; 4070 h264type.bDirectSpatialTemporal = OMX_TRUE; 4071 h264type.nCabacInitIdc = 1; 4072 } 4073 4074 if (h264type.nBFrames != 0) { 4075 h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB; 4076 } 4077 4078 h264type.bEnableUEP = OMX_FALSE; 4079 h264type.bEnableFMO = OMX_FALSE; 4080 h264type.bEnableASO = OMX_FALSE; 4081 h264type.bEnableRS = OMX_FALSE; 4082 
h264type.bFrameMBsOnly = OMX_TRUE; 4083 h264type.bMBAFF = OMX_FALSE; 4084 h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable; 4085 4086 err = mOMX->setParameter( 4087 mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type)); 4088 4089 if (err != OK) { 4090 return err; 4091 } 4092 4093 return configureBitrate(bitrate, bitrateMode); 4094} 4095 4096status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) { 4097 int32_t bitrate, iFrameInterval; 4098 if (!msg->findInt32("bitrate", &bitrate) 4099 || !msg->findInt32("i-frame-interval", &iFrameInterval)) { 4100 return INVALID_OPERATION; 4101 } 4102 4103 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4104 4105 float frameRate; 4106 if (!msg->findFloat("frame-rate", &frameRate)) { 4107 int32_t tmp; 4108 if (!msg->findInt32("frame-rate", &tmp)) { 4109 return INVALID_OPERATION; 4110 } 4111 frameRate = (float)tmp; 4112 } 4113 4114 OMX_VIDEO_PARAM_HEVCTYPE hevcType; 4115 InitOMXParams(&hevcType); 4116 hevcType.nPortIndex = kPortIndexOutput; 4117 4118 status_t err = OK; 4119 err = mOMX->getParameter( 4120 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4121 if (err != OK) { 4122 return err; 4123 } 4124 4125 int32_t profile; 4126 if (msg->findInt32("profile", &profile)) { 4127 int32_t level; 4128 if (!msg->findInt32("level", &level)) { 4129 return INVALID_OPERATION; 4130 } 4131 4132 err = verifySupportForProfileAndLevel(profile, level); 4133 if (err != OK) { 4134 return err; 4135 } 4136 4137 hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile); 4138 hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level); 4139 } 4140 // TODO: finer control? 
4141 hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4142 4143 err = mOMX->setParameter( 4144 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType)); 4145 if (err != OK) { 4146 return err; 4147 } 4148 4149 return configureBitrate(bitrate, bitrateMode); 4150} 4151 4152status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg) { 4153 int32_t bitrate; 4154 int32_t iFrameInterval = 0; 4155 size_t tsLayers = 0; 4156 OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern = 4157 OMX_VIDEO_VPXTemporalLayerPatternNone; 4158 static const uint32_t kVp8LayerRateAlloction 4159 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] 4160 [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = { 4161 {100, 100, 100}, // 1 layer 4162 { 60, 100, 100}, // 2 layers {60%, 40%} 4163 { 40, 60, 100}, // 3 layers {40%, 20%, 40%} 4164 }; 4165 if (!msg->findInt32("bitrate", &bitrate)) { 4166 return INVALID_OPERATION; 4167 } 4168 msg->findInt32("i-frame-interval", &iFrameInterval); 4169 4170 OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg); 4171 4172 float frameRate; 4173 if (!msg->findFloat("frame-rate", &frameRate)) { 4174 int32_t tmp; 4175 if (!msg->findInt32("frame-rate", &tmp)) { 4176 return INVALID_OPERATION; 4177 } 4178 frameRate = (float)tmp; 4179 } 4180 4181 AString tsSchema; 4182 if (msg->findString("ts-schema", &tsSchema)) { 4183 if (tsSchema == "webrtc.vp8.1-layer") { 4184 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4185 tsLayers = 1; 4186 } else if (tsSchema == "webrtc.vp8.2-layer") { 4187 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4188 tsLayers = 2; 4189 } else if (tsSchema == "webrtc.vp8.3-layer") { 4190 pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC; 4191 tsLayers = 3; 4192 } else { 4193 ALOGW("Unsupported ts-schema [%s]", tsSchema.c_str()); 4194 } 4195 } 4196 4197 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4198 InitOMXParams(&vp8type); 4199 vp8type.nPortIndex = kPortIndexOutput; 4200 status_t err = 
mOMX->getParameter( 4201 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4202 &vp8type, sizeof(vp8type)); 4203 4204 if (err == OK) { 4205 if (iFrameInterval > 0) { 4206 vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate); 4207 } 4208 vp8type.eTemporalPattern = pattern; 4209 vp8type.nTemporalLayerCount = tsLayers; 4210 if (tsLayers > 0) { 4211 for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) { 4212 vp8type.nTemporalLayerBitrateRatio[i] = 4213 kVp8LayerRateAlloction[tsLayers - 1][i]; 4214 } 4215 } 4216 if (bitrateMode == OMX_Video_ControlRateConstant) { 4217 vp8type.nMinQuantizer = 2; 4218 vp8type.nMaxQuantizer = 63; 4219 } 4220 4221 err = mOMX->setParameter( 4222 mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4223 &vp8type, sizeof(vp8type)); 4224 if (err != OK) { 4225 ALOGW("Extended VP8 parameters set failed: %d", err); 4226 } 4227 } 4228 4229 return configureBitrate(bitrate, bitrateMode); 4230} 4231 4232status_t ACodec::verifySupportForProfileAndLevel( 4233 int32_t profile, int32_t level) { 4234 OMX_VIDEO_PARAM_PROFILELEVELTYPE params; 4235 InitOMXParams(¶ms); 4236 params.nPortIndex = kPortIndexOutput; 4237 4238 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 4239 params.nProfileIndex = index; 4240 status_t err = mOMX->getParameter( 4241 mNode, 4242 OMX_IndexParamVideoProfileLevelQuerySupported, 4243 ¶ms, 4244 sizeof(params)); 4245 4246 if (err != OK) { 4247 return err; 4248 } 4249 4250 int32_t supportedProfile = static_cast<int32_t>(params.eProfile); 4251 int32_t supportedLevel = static_cast<int32_t>(params.eLevel); 4252 4253 if (profile == supportedProfile && level <= supportedLevel) { 4254 return OK; 4255 } 4256 4257 if (index == kMaxIndicesToCheck) { 4258 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 4259 mComponentName.c_str(), index, 4260 params.eProfile, params.eLevel); 4261 } 4262 } 4263 return ERROR_UNSUPPORTED; 4264} 4265 4266status_t ACodec::configureBitrate( 
4267 int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) { 4268 OMX_VIDEO_PARAM_BITRATETYPE bitrateType; 4269 InitOMXParams(&bitrateType); 4270 bitrateType.nPortIndex = kPortIndexOutput; 4271 4272 status_t err = mOMX->getParameter( 4273 mNode, OMX_IndexParamVideoBitrate, 4274 &bitrateType, sizeof(bitrateType)); 4275 4276 if (err != OK) { 4277 return err; 4278 } 4279 4280 bitrateType.eControlRate = bitrateMode; 4281 bitrateType.nTargetBitrate = bitrate; 4282 4283 return mOMX->setParameter( 4284 mNode, OMX_IndexParamVideoBitrate, 4285 &bitrateType, sizeof(bitrateType)); 4286} 4287 4288status_t ACodec::setupErrorCorrectionParameters() { 4289 OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType; 4290 InitOMXParams(&errorCorrectionType); 4291 errorCorrectionType.nPortIndex = kPortIndexOutput; 4292 4293 status_t err = mOMX->getParameter( 4294 mNode, OMX_IndexParamVideoErrorCorrection, 4295 &errorCorrectionType, sizeof(errorCorrectionType)); 4296 4297 if (err != OK) { 4298 return OK; // Optional feature. 
Ignore this failure 4299 } 4300 4301 errorCorrectionType.bEnableHEC = OMX_FALSE; 4302 errorCorrectionType.bEnableResync = OMX_TRUE; 4303 errorCorrectionType.nResynchMarkerSpacing = 256; 4304 errorCorrectionType.bEnableDataPartitioning = OMX_FALSE; 4305 errorCorrectionType.bEnableRVLC = OMX_FALSE; 4306 4307 return mOMX->setParameter( 4308 mNode, OMX_IndexParamVideoErrorCorrection, 4309 &errorCorrectionType, sizeof(errorCorrectionType)); 4310} 4311 4312status_t ACodec::setVideoFormatOnPort( 4313 OMX_U32 portIndex, 4314 int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat, 4315 float frameRate) { 4316 OMX_PARAM_PORTDEFINITIONTYPE def; 4317 InitOMXParams(&def); 4318 def.nPortIndex = portIndex; 4319 4320 OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video; 4321 4322 status_t err = mOMX->getParameter( 4323 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4324 if (err != OK) { 4325 return err; 4326 } 4327 4328 if (portIndex == kPortIndexInput) { 4329 // XXX Need a (much) better heuristic to compute input buffer sizes. 
4330 const size_t X = 64 * 1024; 4331 if (def.nBufferSize < X) { 4332 def.nBufferSize = X; 4333 } 4334 } 4335 4336 if (def.eDomain != OMX_PortDomainVideo) { 4337 ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain); 4338 return FAILED_TRANSACTION; 4339 } 4340 4341 video_def->nFrameWidth = width; 4342 video_def->nFrameHeight = height; 4343 4344 if (portIndex == kPortIndexInput) { 4345 video_def->eCompressionFormat = compressionFormat; 4346 video_def->eColorFormat = OMX_COLOR_FormatUnused; 4347 if (frameRate >= 0) { 4348 video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f); 4349 } 4350 } 4351 4352 err = mOMX->setParameter( 4353 mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4354 4355 return err; 4356} 4357 4358status_t ACodec::initNativeWindow() { 4359 if (mNativeWindow != NULL) { 4360 return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE); 4361 } 4362 4363 mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE); 4364 return OK; 4365} 4366 4367size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const { 4368 size_t n = 0; 4369 4370 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4371 const BufferInfo &info = mBuffers[portIndex].itemAt(i); 4372 4373 if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) { 4374 ++n; 4375 } 4376 } 4377 4378 return n; 4379} 4380 4381size_t ACodec::countBuffersOwnedByNativeWindow() const { 4382 size_t n = 0; 4383 4384 for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) { 4385 const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i); 4386 4387 if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4388 ++n; 4389 } 4390 } 4391 4392 return n; 4393} 4394 4395void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() { 4396 if (mNativeWindow == NULL) { 4397 return; 4398 } 4399 4400 while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers 4401 && dequeueBufferFromNativeWindow() != NULL) { 
4402 // these buffers will be submitted as regular buffers; account for this 4403 if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) { 4404 --mMetadataBuffersToSubmit; 4405 } 4406 } 4407} 4408 4409bool ACodec::allYourBuffersAreBelongToUs( 4410 OMX_U32 portIndex) { 4411 for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) { 4412 BufferInfo *info = &mBuffers[portIndex].editItemAt(i); 4413 4414 if (info->mStatus != BufferInfo::OWNED_BY_US 4415 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 4416 ALOGV("[%s] Buffer %u on port %u still has status %d", 4417 mComponentName.c_str(), 4418 info->mBufferID, portIndex, info->mStatus); 4419 return false; 4420 } 4421 } 4422 4423 return true; 4424} 4425 4426bool ACodec::allYourBuffersAreBelongToUs() { 4427 return allYourBuffersAreBelongToUs(kPortIndexInput) 4428 && allYourBuffersAreBelongToUs(kPortIndexOutput); 4429} 4430 4431void ACodec::deferMessage(const sp<AMessage> &msg) { 4432 mDeferredQueue.push_back(msg); 4433} 4434 4435void ACodec::processDeferredMessages() { 4436 List<sp<AMessage> > queue = mDeferredQueue; 4437 mDeferredQueue.clear(); 4438 4439 List<sp<AMessage> >::iterator it = queue.begin(); 4440 while (it != queue.end()) { 4441 onMessageReceived(*it++); 4442 } 4443} 4444 4445// static 4446bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params ¶ms) { 4447 MediaImage2 &image = params.sMediaImage; 4448 memset(&image, 0, sizeof(image)); 4449 4450 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4451 image.mNumPlanes = 0; 4452 4453 const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat; 4454 image.mWidth = params.nFrameWidth; 4455 image.mHeight = params.nFrameHeight; 4456 4457 // only supporting YUV420 4458 if (fmt != OMX_COLOR_FormatYUV420Planar && 4459 fmt != OMX_COLOR_FormatYUV420PackedPlanar && 4460 fmt != OMX_COLOR_FormatYUV420SemiPlanar && 4461 fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar && 4462 fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) { 4463 ALOGW("do not 
know color format 0x%x = %d", fmt, fmt); 4464 return false; 4465 } 4466 4467 // TEMPORARY FIX for some vendors that advertise sliceHeight as 0 4468 if (params.nStride != 0 && params.nSliceHeight == 0) { 4469 ALOGW("using sliceHeight=%u instead of what codec advertised (=0)", 4470 params.nFrameHeight); 4471 params.nSliceHeight = params.nFrameHeight; 4472 } 4473 4474 // we need stride and slice-height to be non-zero and sensible. These values were chosen to 4475 // prevent integer overflows further down the line, and do not indicate support for 4476 // 32kx32k video. 4477 if (params.nStride == 0 || params.nSliceHeight == 0 4478 || params.nStride > 32768 || params.nSliceHeight > 32768) { 4479 ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u", 4480 fmt, fmt, params.nStride, params.nSliceHeight); 4481 return false; 4482 } 4483 4484 // set-up YUV format 4485 image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV; 4486 image.mNumPlanes = 3; 4487 image.mBitDepth = 8; 4488 image.mBitDepthAllocated = 8; 4489 image.mPlane[image.Y].mOffset = 0; 4490 image.mPlane[image.Y].mColInc = 1; 4491 image.mPlane[image.Y].mRowInc = params.nStride; 4492 image.mPlane[image.Y].mHorizSubsampling = 1; 4493 image.mPlane[image.Y].mVertSubsampling = 1; 4494 4495 switch ((int)fmt) { 4496 case HAL_PIXEL_FORMAT_YV12: 4497 if (params.bUsingNativeBuffers) { 4498 size_t ystride = align(params.nStride, 16); 4499 size_t cstride = align(params.nStride / 2, 16); 4500 image.mPlane[image.Y].mRowInc = ystride; 4501 4502 image.mPlane[image.V].mOffset = ystride * params.nSliceHeight; 4503 image.mPlane[image.V].mColInc = 1; 4504 image.mPlane[image.V].mRowInc = cstride; 4505 image.mPlane[image.V].mHorizSubsampling = 2; 4506 image.mPlane[image.V].mVertSubsampling = 2; 4507 4508 image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset 4509 + (cstride * params.nSliceHeight / 2); 4510 image.mPlane[image.U].mColInc = 1; 4511 image.mPlane[image.U].mRowInc = cstride; 4512 
image.mPlane[image.U].mHorizSubsampling = 2; 4513 image.mPlane[image.U].mVertSubsampling = 2; 4514 break; 4515 } else { 4516 // fall through as YV12 is used for YUV420Planar by some codecs 4517 } 4518 4519 case OMX_COLOR_FormatYUV420Planar: 4520 case OMX_COLOR_FormatYUV420PackedPlanar: 4521 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4522 image.mPlane[image.U].mColInc = 1; 4523 image.mPlane[image.U].mRowInc = params.nStride / 2; 4524 image.mPlane[image.U].mHorizSubsampling = 2; 4525 image.mPlane[image.U].mVertSubsampling = 2; 4526 4527 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset 4528 + (params.nStride * params.nSliceHeight / 4); 4529 image.mPlane[image.V].mColInc = 1; 4530 image.mPlane[image.V].mRowInc = params.nStride / 2; 4531 image.mPlane[image.V].mHorizSubsampling = 2; 4532 image.mPlane[image.V].mVertSubsampling = 2; 4533 break; 4534 4535 case OMX_COLOR_FormatYUV420SemiPlanar: 4536 // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder 4537 case OMX_COLOR_FormatYUV420PackedSemiPlanar: 4538 // NV12 4539 image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight; 4540 image.mPlane[image.U].mColInc = 2; 4541 image.mPlane[image.U].mRowInc = params.nStride; 4542 image.mPlane[image.U].mHorizSubsampling = 2; 4543 image.mPlane[image.U].mVertSubsampling = 2; 4544 4545 image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1; 4546 image.mPlane[image.V].mColInc = 2; 4547 image.mPlane[image.V].mRowInc = params.nStride; 4548 image.mPlane[image.V].mHorizSubsampling = 2; 4549 image.mPlane[image.V].mVertSubsampling = 2; 4550 break; 4551 4552 default: 4553 TRESPASS(); 4554 } 4555 return true; 4556} 4557 4558// static 4559bool ACodec::describeColorFormat( 4560 const sp<IOMX> &omx, IOMX::node_id node, 4561 DescribeColorFormat2Params &describeParams) 4562{ 4563 OMX_INDEXTYPE describeColorFormatIndex; 4564 if (omx->getExtensionIndex( 4565 node, "OMX.google.android.index.describeColorFormat", 4566 
&describeColorFormatIndex) == OK) { 4567 DescribeColorFormatParams describeParamsV1(describeParams); 4568 if (omx->getParameter( 4569 node, describeColorFormatIndex, 4570 &describeParamsV1, sizeof(describeParamsV1)) == OK) { 4571 describeParams.initFromV1(describeParamsV1); 4572 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4573 } 4574 } else if (omx->getExtensionIndex( 4575 node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK 4576 && omx->getParameter( 4577 node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) { 4578 return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN; 4579 } 4580 4581 return describeDefaultColorFormat(describeParams); 4582} 4583 4584// static 4585bool ACodec::isFlexibleColorFormat( 4586 const sp<IOMX> &omx, IOMX::node_id node, 4587 uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) { 4588 DescribeColorFormat2Params describeParams; 4589 InitOMXParams(&describeParams); 4590 describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat; 4591 // reasonable dummy values 4592 describeParams.nFrameWidth = 128; 4593 describeParams.nFrameHeight = 128; 4594 describeParams.nStride = 128; 4595 describeParams.nSliceHeight = 128; 4596 describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers; 4597 4598 CHECK(flexibleEquivalent != NULL); 4599 4600 if (!describeColorFormat(omx, node, describeParams)) { 4601 return false; 4602 } 4603 4604 const MediaImage2 &img = describeParams.sMediaImage; 4605 if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) { 4606 if (img.mNumPlanes != 3 4607 || img.mPlane[img.Y].mHorizSubsampling != 1 4608 || img.mPlane[img.Y].mVertSubsampling != 1) { 4609 return false; 4610 } 4611 4612 // YUV 420 4613 if (img.mPlane[img.U].mHorizSubsampling == 2 4614 && img.mPlane[img.U].mVertSubsampling == 2 4615 && img.mPlane[img.V].mHorizSubsampling == 2 4616 && 
img.mPlane[img.V].mVertSubsampling == 2) { 4617 // possible flexible YUV420 format 4618 if (img.mBitDepth <= 8) { 4619 *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible; 4620 return true; 4621 } 4622 } 4623 } 4624 return false; 4625} 4626 4627status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> ¬ify) { 4628 const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output"; 4629 OMX_PARAM_PORTDEFINITIONTYPE def; 4630 InitOMXParams(&def); 4631 def.nPortIndex = portIndex; 4632 4633 status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def)); 4634 if (err != OK) { 4635 return err; 4636 } 4637 4638 if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) { 4639 ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex); 4640 return BAD_VALUE; 4641 } 4642 4643 switch (def.eDomain) { 4644 case OMX_PortDomainVideo: 4645 { 4646 OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video; 4647 switch ((int)videoDef->eCompressionFormat) { 4648 case OMX_VIDEO_CodingUnused: 4649 { 4650 CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput)); 4651 notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW); 4652 4653 notify->setInt32("stride", videoDef->nStride); 4654 notify->setInt32("slice-height", videoDef->nSliceHeight); 4655 notify->setInt32("color-format", videoDef->eColorFormat); 4656 4657 if (mNativeWindow == NULL) { 4658 DescribeColorFormat2Params describeParams; 4659 InitOMXParams(&describeParams); 4660 describeParams.eColorFormat = videoDef->eColorFormat; 4661 describeParams.nFrameWidth = videoDef->nFrameWidth; 4662 describeParams.nFrameHeight = videoDef->nFrameHeight; 4663 describeParams.nStride = videoDef->nStride; 4664 describeParams.nSliceHeight = videoDef->nSliceHeight; 4665 describeParams.bUsingNativeBuffers = OMX_FALSE; 4666 4667 if (describeColorFormat(mOMX, mNode, describeParams)) { 4668 notify->setBuffer( 4669 "image-data", 4670 ABuffer::CreateAsCopy( 4671 
&describeParams.sMediaImage, 4672 sizeof(describeParams.sMediaImage))); 4673 4674 MediaImage2 &img = describeParams.sMediaImage; 4675 MediaImage2::PlaneInfo *plane = img.mPlane; 4676 ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }", 4677 mComponentName.c_str(), img.mWidth, img.mHeight, 4678 plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc, 4679 plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc, 4680 plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc); 4681 } 4682 } 4683 4684 int32_t width = (int32_t)videoDef->nFrameWidth; 4685 int32_t height = (int32_t)videoDef->nFrameHeight; 4686 4687 if (portIndex == kPortIndexOutput) { 4688 OMX_CONFIG_RECTTYPE rect; 4689 InitOMXParams(&rect); 4690 rect.nPortIndex = portIndex; 4691 4692 if (mOMX->getConfig( 4693 mNode, 4694 (portIndex == kPortIndexOutput ? 4695 OMX_IndexConfigCommonOutputCrop : 4696 OMX_IndexConfigCommonInputCrop), 4697 &rect, sizeof(rect)) != OK) { 4698 rect.nLeft = 0; 4699 rect.nTop = 0; 4700 rect.nWidth = videoDef->nFrameWidth; 4701 rect.nHeight = videoDef->nFrameHeight; 4702 } 4703 4704 if (rect.nLeft < 0 || 4705 rect.nTop < 0 || 4706 rect.nLeft + rect.nWidth > videoDef->nFrameWidth || 4707 rect.nTop + rect.nHeight > videoDef->nFrameHeight) { 4708 ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)", 4709 rect.nLeft, rect.nTop, 4710 rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight, 4711 videoDef->nFrameWidth, videoDef->nFrameHeight); 4712 return BAD_VALUE; 4713 } 4714 4715 notify->setRect( 4716 "crop", 4717 rect.nLeft, 4718 rect.nTop, 4719 rect.nLeft + rect.nWidth - 1, 4720 rect.nTop + rect.nHeight - 1); 4721 4722 width = rect.nWidth; 4723 height = rect.nHeight; 4724 4725 android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN; 4726 (void)getColorAspectsAndDataSpaceForVideoDecoder( 4727 width, height, mConfigFormat, notify, 4728 mUsingNativeWindow ? 
&dataSpace : NULL); 4729 if (mUsingNativeWindow) { 4730 notify->setInt32("android._dataspace", dataSpace); 4731 } 4732 (void)getHDRStaticInfoForVideoDecoder(notify); 4733 } else { 4734 (void)getInputColorAspectsForVideoEncoder(notify); 4735 } 4736 4737 break; 4738 } 4739 4740 case OMX_VIDEO_CodingVP8: 4741 case OMX_VIDEO_CodingVP9: 4742 { 4743 OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type; 4744 InitOMXParams(&vp8type); 4745 vp8type.nPortIndex = kPortIndexOutput; 4746 status_t err = mOMX->getParameter( 4747 mNode, 4748 (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder, 4749 &vp8type, 4750 sizeof(vp8type)); 4751 4752 if (err == OK) { 4753 AString tsSchema = "none"; 4754 if (vp8type.eTemporalPattern 4755 == OMX_VIDEO_VPXTemporalLayerPatternWebRTC) { 4756 switch (vp8type.nTemporalLayerCount) { 4757 case 1: 4758 { 4759 tsSchema = "webrtc.vp8.1-layer"; 4760 break; 4761 } 4762 case 2: 4763 { 4764 tsSchema = "webrtc.vp8.2-layer"; 4765 break; 4766 } 4767 case 3: 4768 { 4769 tsSchema = "webrtc.vp8.3-layer"; 4770 break; 4771 } 4772 default: 4773 { 4774 break; 4775 } 4776 } 4777 } 4778 notify->setString("ts-schema", tsSchema); 4779 } 4780 // Fall through to set up mime. 
4781 } 4782 4783 default: 4784 { 4785 if (mIsEncoder ^ (portIndex == kPortIndexOutput)) { 4786 // should be CodingUnused 4787 ALOGE("Raw port video compression format is %s(%d)", 4788 asString(videoDef->eCompressionFormat), 4789 videoDef->eCompressionFormat); 4790 return BAD_VALUE; 4791 } 4792 AString mime; 4793 if (GetMimeTypeForVideoCoding( 4794 videoDef->eCompressionFormat, &mime) != OK) { 4795 notify->setString("mime", "application/octet-stream"); 4796 } else { 4797 notify->setString("mime", mime.c_str()); 4798 } 4799 uint32_t intraRefreshPeriod = 0; 4800 if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK 4801 && intraRefreshPeriod > 0) { 4802 notify->setInt32("intra-refresh-period", intraRefreshPeriod); 4803 } 4804 break; 4805 } 4806 } 4807 notify->setInt32("width", videoDef->nFrameWidth); 4808 notify->setInt32("height", videoDef->nFrameHeight); 4809 ALOGV("[%s] %s format is %s", mComponentName.c_str(), 4810 portIndex == kPortIndexInput ? "input" : "output", 4811 notify->debugString().c_str()); 4812 4813 break; 4814 } 4815 4816 case OMX_PortDomainAudio: 4817 { 4818 OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio; 4819 4820 switch ((int)audioDef->eEncoding) { 4821 case OMX_AUDIO_CodingPCM: 4822 { 4823 OMX_AUDIO_PARAM_PCMMODETYPE params; 4824 InitOMXParams(¶ms); 4825 params.nPortIndex = portIndex; 4826 4827 err = mOMX->getParameter( 4828 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 4829 if (err != OK) { 4830 return err; 4831 } 4832 4833 if (params.nChannels <= 0 4834 || (params.nChannels != 1 && !params.bInterleaved) 4835 || params.ePCMMode != OMX_AUDIO_PCMModeLinear) { 4836 ALOGE("unsupported PCM port: %u channels%s, %u-bit", 4837 params.nChannels, 4838 params.bInterleaved ? 
" interleaved" : "", 4839 params.nBitPerSample); 4840 return FAILED_TRANSACTION; 4841 } 4842 4843 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW); 4844 notify->setInt32("channel-count", params.nChannels); 4845 notify->setInt32("sample-rate", params.nSamplingRate); 4846 4847 AudioEncoding encoding = kAudioEncodingPcm16bit; 4848 if (params.eNumData == OMX_NumericalDataUnsigned 4849 && params.nBitPerSample == 8u) { 4850 encoding = kAudioEncodingPcm8bit; 4851 } else if (params.eNumData == OMX_NumericalDataFloat 4852 && params.nBitPerSample == 32u) { 4853 encoding = kAudioEncodingPcmFloat; 4854 } else if (params.nBitPerSample != 16u 4855 || params.eNumData != OMX_NumericalDataSigned) { 4856 ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ", 4857 asString(params.eNumData), params.eNumData, 4858 asString(params.ePCMMode), params.ePCMMode); 4859 return FAILED_TRANSACTION; 4860 } 4861 notify->setInt32("pcm-encoding", encoding); 4862 4863 if (mChannelMaskPresent) { 4864 notify->setInt32("channel-mask", mChannelMask); 4865 } 4866 break; 4867 } 4868 4869 case OMX_AUDIO_CodingAAC: 4870 { 4871 OMX_AUDIO_PARAM_AACPROFILETYPE params; 4872 InitOMXParams(¶ms); 4873 params.nPortIndex = portIndex; 4874 4875 err = mOMX->getParameter( 4876 mNode, OMX_IndexParamAudioAac, ¶ms, sizeof(params)); 4877 if (err != OK) { 4878 return err; 4879 } 4880 4881 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC); 4882 notify->setInt32("channel-count", params.nChannels); 4883 notify->setInt32("sample-rate", params.nSampleRate); 4884 break; 4885 } 4886 4887 case OMX_AUDIO_CodingAMR: 4888 { 4889 OMX_AUDIO_PARAM_AMRTYPE params; 4890 InitOMXParams(¶ms); 4891 params.nPortIndex = portIndex; 4892 4893 err = mOMX->getParameter( 4894 mNode, OMX_IndexParamAudioAmr, ¶ms, sizeof(params)); 4895 if (err != OK) { 4896 return err; 4897 } 4898 4899 notify->setInt32("channel-count", 1); 4900 if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) { 4901 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB); 4902 
notify->setInt32("sample-rate", 16000); 4903 } else { 4904 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB); 4905 notify->setInt32("sample-rate", 8000); 4906 } 4907 break; 4908 } 4909 4910 case OMX_AUDIO_CodingFLAC: 4911 { 4912 OMX_AUDIO_PARAM_FLACTYPE params; 4913 InitOMXParams(¶ms); 4914 params.nPortIndex = portIndex; 4915 4916 err = mOMX->getParameter( 4917 mNode, OMX_IndexParamAudioFlac, ¶ms, sizeof(params)); 4918 if (err != OK) { 4919 return err; 4920 } 4921 4922 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC); 4923 notify->setInt32("channel-count", params.nChannels); 4924 notify->setInt32("sample-rate", params.nSampleRate); 4925 break; 4926 } 4927 4928 case OMX_AUDIO_CodingMP3: 4929 { 4930 OMX_AUDIO_PARAM_MP3TYPE params; 4931 InitOMXParams(¶ms); 4932 params.nPortIndex = portIndex; 4933 4934 err = mOMX->getParameter( 4935 mNode, OMX_IndexParamAudioMp3, ¶ms, sizeof(params)); 4936 if (err != OK) { 4937 return err; 4938 } 4939 4940 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG); 4941 notify->setInt32("channel-count", params.nChannels); 4942 notify->setInt32("sample-rate", params.nSampleRate); 4943 break; 4944 } 4945 4946 case OMX_AUDIO_CodingVORBIS: 4947 { 4948 OMX_AUDIO_PARAM_VORBISTYPE params; 4949 InitOMXParams(¶ms); 4950 params.nPortIndex = portIndex; 4951 4952 err = mOMX->getParameter( 4953 mNode, OMX_IndexParamAudioVorbis, ¶ms, sizeof(params)); 4954 if (err != OK) { 4955 return err; 4956 } 4957 4958 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS); 4959 notify->setInt32("channel-count", params.nChannels); 4960 notify->setInt32("sample-rate", params.nSampleRate); 4961 break; 4962 } 4963 4964 case OMX_AUDIO_CodingAndroidAC3: 4965 { 4966 OMX_AUDIO_PARAM_ANDROID_AC3TYPE params; 4967 InitOMXParams(¶ms); 4968 params.nPortIndex = portIndex; 4969 4970 err = mOMX->getParameter( 4971 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3, 4972 ¶ms, sizeof(params)); 4973 if (err != OK) { 4974 return err; 4975 } 4976 4977 notify->setString("mime", 
MEDIA_MIMETYPE_AUDIO_AC3); 4978 notify->setInt32("channel-count", params.nChannels); 4979 notify->setInt32("sample-rate", params.nSampleRate); 4980 break; 4981 } 4982 4983 case OMX_AUDIO_CodingAndroidEAC3: 4984 { 4985 OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params; 4986 InitOMXParams(¶ms); 4987 params.nPortIndex = portIndex; 4988 4989 err = mOMX->getParameter( 4990 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3, 4991 ¶ms, sizeof(params)); 4992 if (err != OK) { 4993 return err; 4994 } 4995 4996 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3); 4997 notify->setInt32("channel-count", params.nChannels); 4998 notify->setInt32("sample-rate", params.nSampleRate); 4999 break; 5000 } 5001 5002 case OMX_AUDIO_CodingAndroidOPUS: 5003 { 5004 OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params; 5005 InitOMXParams(¶ms); 5006 params.nPortIndex = portIndex; 5007 5008 err = mOMX->getParameter( 5009 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidOpus, 5010 ¶ms, sizeof(params)); 5011 if (err != OK) { 5012 return err; 5013 } 5014 5015 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_OPUS); 5016 notify->setInt32("channel-count", params.nChannels); 5017 notify->setInt32("sample-rate", params.nSampleRate); 5018 break; 5019 } 5020 5021 case OMX_AUDIO_CodingG711: 5022 { 5023 OMX_AUDIO_PARAM_PCMMODETYPE params; 5024 InitOMXParams(¶ms); 5025 params.nPortIndex = portIndex; 5026 5027 err = mOMX->getParameter( 5028 mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5029 if (err != OK) { 5030 return err; 5031 } 5032 5033 const char *mime = NULL; 5034 if (params.ePCMMode == OMX_AUDIO_PCMModeMULaw) { 5035 mime = MEDIA_MIMETYPE_AUDIO_G711_MLAW; 5036 } else if (params.ePCMMode == OMX_AUDIO_PCMModeALaw) { 5037 mime = MEDIA_MIMETYPE_AUDIO_G711_ALAW; 5038 } else { // params.ePCMMode == OMX_AUDIO_PCMModeLinear 5039 mime = MEDIA_MIMETYPE_AUDIO_RAW; 5040 } 5041 notify->setString("mime", mime); 5042 notify->setInt32("channel-count", params.nChannels); 5043 notify->setInt32("sample-rate", 
params.nSamplingRate); 5044 notify->setInt32("pcm-encoding", kAudioEncodingPcm16bit); 5045 break; 5046 } 5047 5048 case OMX_AUDIO_CodingGSMFR: 5049 { 5050 OMX_AUDIO_PARAM_PCMMODETYPE params; 5051 InitOMXParams(¶ms); 5052 params.nPortIndex = portIndex; 5053 5054 err = mOMX->getParameter( 5055 mNode, OMX_IndexParamAudioPcm, ¶ms, sizeof(params)); 5056 if (err != OK) { 5057 return err; 5058 } 5059 5060 notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MSGSM); 5061 notify->setInt32("channel-count", params.nChannels); 5062 notify->setInt32("sample-rate", params.nSamplingRate); 5063 break; 5064 } 5065 5066 default: 5067 ALOGE("Unsupported audio coding: %s(%d)\n", 5068 asString(audioDef->eEncoding), audioDef->eEncoding); 5069 return BAD_TYPE; 5070 } 5071 break; 5072 } 5073 5074 default: 5075 ALOGE("Unsupported domain: %s(%d)", asString(def.eDomain), def.eDomain); 5076 return BAD_TYPE; 5077 } 5078 5079 return OK; 5080} 5081 5082void ACodec::onDataSpaceChanged(android_dataspace dataSpace, const ColorAspects &aspects) { 5083 // aspects are normally communicated in ColorAspects 5084 int32_t range, standard, transfer; 5085 convertCodecColorAspectsToPlatformAspects(aspects, &range, &standard, &transfer); 5086 5087 // if some aspects are unspecified, use dataspace fields 5088 if (range != 0) { 5089 range = (dataSpace & HAL_DATASPACE_RANGE_MASK) >> HAL_DATASPACE_RANGE_SHIFT; 5090 } 5091 if (standard != 0) { 5092 standard = (dataSpace & HAL_DATASPACE_STANDARD_MASK) >> HAL_DATASPACE_STANDARD_SHIFT; 5093 } 5094 if (transfer != 0) { 5095 transfer = (dataSpace & HAL_DATASPACE_TRANSFER_MASK) >> HAL_DATASPACE_TRANSFER_SHIFT; 5096 } 5097 5098 mOutputFormat = mOutputFormat->dup(); // trigger an output format changed event 5099 if (range != 0) { 5100 mOutputFormat->setInt32("color-range", range); 5101 } 5102 if (standard != 0) { 5103 mOutputFormat->setInt32("color-standard", standard); 5104 } 5105 if (transfer != 0) { 5106 mOutputFormat->setInt32("color-transfer", transfer); 5107 } 5108 5109 
ALOGD("dataspace changed to %#x (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) " 5110 "(R:%d(%s), S:%d(%s), T:%d(%s))", 5111 dataSpace, 5112 aspects.mRange, asString(aspects.mRange), 5113 aspects.mPrimaries, asString(aspects.mPrimaries), 5114 aspects.mMatrixCoeffs, asString(aspects.mMatrixCoeffs), 5115 aspects.mTransfer, asString(aspects.mTransfer), 5116 range, asString((ColorRange)range), 5117 standard, asString((ColorStandard)standard), 5118 transfer, asString((ColorTransfer)transfer)); 5119} 5120 5121void ACodec::onOutputFormatChanged(sp<const AMessage> expectedFormat) { 5122 // store new output format, at the same time mark that this is no longer the first frame 5123 mOutputFormat = mBaseOutputFormat->dup(); 5124 5125 if (getPortFormat(kPortIndexOutput, mOutputFormat) != OK) { 5126 ALOGE("[%s] Failed to get port format to send format change", mComponentName.c_str()); 5127 return; 5128 } 5129 5130 if (expectedFormat != NULL) { 5131 sp<const AMessage> changes = expectedFormat->changesFrom(mOutputFormat); 5132 sp<const AMessage> to = mOutputFormat->changesFrom(expectedFormat); 5133 if (changes->countEntries() != 0 || to->countEntries() != 0) { 5134 ALOGW("[%s] BAD CODEC: Output format changed unexpectedly from (diff) %s to (diff) %s", 5135 mComponentName.c_str(), 5136 changes->debugString(4).c_str(), to->debugString(4).c_str()); 5137 } 5138 } 5139 5140 if (!mIsVideo && !mIsEncoder) { 5141 AudioEncoding pcmEncoding = kAudioEncodingPcm16bit; 5142 (void)mConfigFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5143 AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit; 5144 (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&pcmEncoding); 5145 5146 mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding); 5147 if (mConverter[kPortIndexOutput] != NULL) { 5148 mOutputFormat->setInt32("pcm-encoding", pcmEncoding); 5149 } 5150 } 5151 5152 if (mTunneled) { 5153 sendFormatChange(); 5154 } 5155} 5156 5157void 
ACodec::addKeyFormatChangesToRenderBufferNotification(sp<AMessage> &notify) {
    // Copies crop/dataspace changes from mOutputFormat into the render-buffer
    // reply so the renderer can apply them; only relevant for raw video
    // rendered to a native window.
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_VIDEO_RAW && mNativeWindow != NULL) {
        // notify renderer of the crop change and dataspace change
        // NOTE: native window uses extended right-bottom coordinate
        int32_t left, top, right, bottom;
        if (mOutputFormat->findRect("crop", &left, &top, &right, &bottom)) {
            // +1 converts from inclusive right/bottom to the window's
            // extended (exclusive) coordinate convention.
            notify->setRect("crop", left, top, right + 1, bottom + 1);
        }

        int32_t dataSpace;
        if (mOutputFormat->findInt32("android._dataspace", &dataSpace)) {
            notify->setInt32("dataspace", dataSpace);
        }
    }
}

// Posts a kWhatOutputFormatChanged notification carrying mOutputFormat.
// For raw audio with encoder delay/padding, (re)creates the SkipCutBuffer
// used to trim those samples from the decoded stream.
void ACodec::sendFormatChange() {
    AString mime;
    CHECK(mOutputFormat->findString("mime", &mime));

    if (mime == MEDIA_MIMETYPE_AUDIO_RAW && (mEncoderDelay || mEncoderPadding)) {
        int32_t channelCount;
        CHECK(mOutputFormat->findInt32("channel-count", &channelCount));
        if (mSkipCutBuffer != NULL) {
            size_t prevbufsize = mSkipCutBuffer->size();
            if (prevbufsize != 0) {
                // any data still held in the old buffer is discarded
                ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbufsize);
            }
        }
        mSkipCutBuffer = new SkipCutBuffer(mEncoderDelay, mEncoderPadding, channelCount);
    }

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatOutputFormatChanged);
    notify->setMessage("format", mOutputFormat);
    notify->post();

    // mLastOutputFormat is not used when tunneled; doing this just to stay consistent
    mLastOutputFormat = mOutputFormat;
}

// Reports a fatal error to the client (CodecBase::kWhatError). If the caller
// passed UNKNOWN_ERROR, tries to derive a more specific status from the OMX
// error code. Sets mFatalError so subsequent operations can bail out early.
void ACodec::signalError(OMX_ERRORTYPE error, status_t internalError) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatError);
    ALOGE("signalError(omxError %#x, internalError %d)", error, internalError);

    if (internalError == UNKNOWN_ERROR) { // find better error code
        const status_t omxStatus = statusFromOMXError(error);
        if (omxStatus != 0) {
            internalError = omxStatus;
        } else {
            ALOGW("Invalid OMX error %#x", error);
        }
    }

    mFatalError = true;

    notify->setInt32("err", internalError);
    notify->setInt32("actionCode", ACTION_CODE_FATAL); // could translate from OMX error.
    notify->post();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::PortDescription::PortDescription() {
}

// Requests that the encoder emit an IDR (sync) frame as soon as possible.
// Only meaningful for encoders; decoders get ERROR_UNSUPPORTED.
status_t ACodec::requestIDRFrame() {
    if (!mIsEncoder) {
        return ERROR_UNSUPPORTED;
    }

    OMX_CONFIG_INTRAREFRESHVOPTYPE params;
    InitOMXParams(&params);

    params.nPortIndex = kPortIndexOutput;
    params.IntraRefreshVOP = OMX_TRUE;

    return mOMX->setConfig(
            mNode,
            OMX_IndexConfigVideoIntraVOPRefresh,
            &params,
            sizeof(params));
}

// Records one buffer entry; the four parallel vectors below are indexed
// together, so entries are always appended as a unit.
void ACodec::PortDescription::addBuffer(
        IOMX::buffer_id id, const sp<ABuffer> &buffer,
        const sp<NativeHandle> &handle, const sp<RefBase> &memRef) {
    mBufferIDs.push_back(id);
    mBuffers.push_back(buffer);
    mHandles.push_back(handle);
    mMemRefs.push_back(memRef);
}

size_t ACodec::PortDescription::countBuffers() {
    return mBufferIDs.size();
}

IOMX::buffer_id ACodec::PortDescription::bufferIDAt(size_t index) const {
    return mBufferIDs.itemAt(index);
}

sp<ABuffer> ACodec::PortDescription::bufferAt(size_t index) const {
    return mBuffers.itemAt(index);
}

sp<NativeHandle> ACodec::PortDescription::handleAt(size_t index) const {
    return mHandles.itemAt(index);
}

sp<RefBase> ACodec::PortDescription::memRefAt(size_t index) const {
    return mMemRefs.itemAt(index);
}

////////////////////////////////////////////////////////////////////////////////

ACodec::BaseState::BaseState(ACodec *codec, const sp<AState> &parentState)
    : AState(parentState),
mCodec(codec) { 5279} 5280 5281ACodec::BaseState::PortMode ACodec::BaseState::getPortMode( 5282 OMX_U32 /* portIndex */) { 5283 return KEEP_BUFFERS; 5284} 5285 5286bool ACodec::BaseState::onMessageReceived(const sp<AMessage> &msg) { 5287 switch (msg->what()) { 5288 case kWhatInputBufferFilled: 5289 { 5290 onInputBufferFilled(msg); 5291 break; 5292 } 5293 5294 case kWhatOutputBufferDrained: 5295 { 5296 onOutputBufferDrained(msg); 5297 break; 5298 } 5299 5300 case ACodec::kWhatOMXMessageList: 5301 { 5302 return checkOMXMessage(msg) ? onOMXMessageList(msg) : true; 5303 } 5304 5305 case ACodec::kWhatOMXMessageItem: 5306 { 5307 // no need to check as we already did it for kWhatOMXMessageList 5308 return onOMXMessage(msg); 5309 } 5310 5311 case ACodec::kWhatOMXMessage: 5312 { 5313 return checkOMXMessage(msg) ? onOMXMessage(msg) : true; 5314 } 5315 5316 case ACodec::kWhatSetSurface: 5317 { 5318 sp<AReplyToken> replyID; 5319 CHECK(msg->senderAwaitsResponse(&replyID)); 5320 5321 sp<RefBase> obj; 5322 CHECK(msg->findObject("surface", &obj)); 5323 5324 status_t err = mCodec->handleSetSurface(static_cast<Surface *>(obj.get())); 5325 5326 sp<AMessage> response = new AMessage; 5327 response->setInt32("err", err); 5328 response->postReply(replyID); 5329 break; 5330 } 5331 5332 case ACodec::kWhatCreateInputSurface: 5333 case ACodec::kWhatSetInputSurface: 5334 case ACodec::kWhatSignalEndOfInputStream: 5335 { 5336 // This may result in an app illegal state exception. 5337 ALOGE("Message 0x%x was not handled", msg->what()); 5338 mCodec->signalError(OMX_ErrorUndefined, INVALID_OPERATION); 5339 return true; 5340 } 5341 5342 case ACodec::kWhatOMXDied: 5343 { 5344 // This will result in kFlagSawMediaServerDie handling in MediaCodec. 
5345 ALOGE("OMX/mediaserver died, signalling error!"); 5346 mCodec->signalError(OMX_ErrorResourcesLost, DEAD_OBJECT); 5347 break; 5348 } 5349 5350 case ACodec::kWhatReleaseCodecInstance: 5351 { 5352 ALOGI("[%s] forcing the release of codec", 5353 mCodec->mComponentName.c_str()); 5354 status_t err = mCodec->mOMX->freeNode(mCodec->mNode); 5355 ALOGE_IF("[%s] failed to release codec instance: err=%d", 5356 mCodec->mComponentName.c_str(), err); 5357 sp<AMessage> notify = mCodec->mNotify->dup(); 5358 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 5359 notify->post(); 5360 break; 5361 } 5362 5363 default: 5364 return false; 5365 } 5366 5367 return true; 5368} 5369 5370bool ACodec::BaseState::checkOMXMessage(const sp<AMessage> &msg) { 5371 // there is a possibility that this is an outstanding message for a 5372 // codec that we have already destroyed 5373 if (mCodec->mNode == 0) { 5374 ALOGI("ignoring message as already freed component: %s", 5375 msg->debugString().c_str()); 5376 return false; 5377 } 5378 5379 IOMX::node_id nodeID; 5380 CHECK(msg->findInt32("node", (int32_t*)&nodeID)); 5381 if (nodeID != mCodec->mNode) { 5382 ALOGE("Unexpected message for nodeID: %u, should have been %u", nodeID, mCodec->mNode); 5383 return false; 5384 } 5385 return true; 5386} 5387 5388bool ACodec::BaseState::onOMXMessageList(const sp<AMessage> &msg) { 5389 sp<RefBase> obj; 5390 CHECK(msg->findObject("messages", &obj)); 5391 sp<MessageList> msgList = static_cast<MessageList *>(obj.get()); 5392 5393 bool receivedRenderedEvents = false; 5394 for (std::list<sp<AMessage>>::const_iterator it = msgList->getList().cbegin(); 5395 it != msgList->getList().cend(); ++it) { 5396 (*it)->setWhat(ACodec::kWhatOMXMessageItem); 5397 mCodec->handleMessage(*it); 5398 int32_t type; 5399 CHECK((*it)->findInt32("type", &type)); 5400 if (type == omx_message::FRAME_RENDERED) { 5401 receivedRenderedEvents = true; 5402 } 5403 } 5404 5405 if (receivedRenderedEvents) { 5406 // NOTE: all buffers are 
// rendered in this case
        mCodec->notifyOfRenderedFrames();
    }
    return true;
}

// Dispatches a single OMX callback message (event / EBD / FBD / frame
// rendered) to the corresponding handler.
bool ACodec::BaseState::onOMXMessage(const sp<AMessage> &msg) {
    int32_t type;
    CHECK(msg->findInt32("type", &type));

    switch (type) {
        case omx_message::EVENT:
        {
            int32_t event, data1, data2;
            CHECK(msg->findInt32("event", &event));
            CHECK(msg->findInt32("data1", &data1));
            CHECK(msg->findInt32("data2", &data2));

            if (event == OMX_EventCmdComplete
                    && data1 == OMX_CommandFlush
                    && data2 == (int32_t)OMX_ALL) {
                // Use of this notification is not consistent across
                // implementations. We'll drop this notification and rely
                // on flush-complete notifications on the individual port
                // indices instead.

                return true;
            }

            return onOMXEvent(
                    static_cast<OMX_EVENTTYPE>(event),
                    static_cast<OMX_U32>(data1),
                    static_cast<OMX_U32>(data2));
        }

        case omx_message::EMPTY_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            int32_t fenceFd;

            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXEmptyBufferDone(bufferID, fenceFd);
        }

        case omx_message::FILL_BUFFER_DONE:
        {
            IOMX::buffer_id bufferID;
            CHECK(msg->findInt32("buffer", (int32_t*)&bufferID));

            int32_t rangeOffset, rangeLength, flags, fenceFd;
            int64_t timeUs;

            CHECK(msg->findInt32("range_offset", &rangeOffset));
            CHECK(msg->findInt32("range_length", &rangeLength));
            CHECK(msg->findInt32("flags", &flags));
            CHECK(msg->findInt64("timestamp", &timeUs));
            CHECK(msg->findInt32("fence_fd", &fenceFd));

            return onOMXFillBufferDone(
                    bufferID,
                    (size_t)rangeOffset, (size_t)rangeLength,
                    (OMX_U32)flags,
                    timeUs,
                    fenceFd);
        }

        case omx_message::FRAME_RENDERED:
        {
            int64_t mediaTimeUs, systemNano;

            CHECK(msg->findInt64("media_time_us", &mediaTimeUs));
            CHECK(msg->findInt64("system_nano", &systemNano));

            return onOMXFrameRendered(
                    mediaTimeUs, systemNano);
        }

        default:
            ALOGE("Unexpected message type: %d", type);
            return false;
    }
}

bool ACodec::BaseState::onOMXFrameRendered(
        int64_t mediaTimeUs __unused, nsecs_t systemNano __unused) {
    // ignore outside of Executing and PortSettingsChanged states
    return true;
}

// Handles OMX events at the base-state level: decodes dataspace-change
// payloads (aspects packed one byte each into data2) and fatal errors;
// everything else is left for the concrete state (returns false).
bool ACodec::BaseState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    if (event == OMX_EventDataSpaceChanged) {
        ColorAspects aspects;
        // data2 packs the four aspects, one byte each (range in the top byte)
        aspects.mRange = (ColorAspects::Range)((data2 >> 24) & 0xFF);
        aspects.mPrimaries = (ColorAspects::Primaries)((data2 >> 16) & 0xFF);
        aspects.mMatrixCoeffs = (ColorAspects::MatrixCoeffs)((data2 >> 8) & 0xFF);
        aspects.mTransfer = (ColorAspects::Transfer)(data2 & 0xFF);

        mCodec->onDataSpaceChanged((android_dataspace)data1, aspects);
        return true;
    }

    if (event != OMX_EventError) {
        ALOGV("[%s] EVENT(%d, 0x%08x, 0x%08x)",
                mCodec->mComponentName.c_str(), event, data1, data2);

        return false;
    }

    ALOGE("[%s] ERROR(0x%08x)", mCodec->mComponentName.c_str(), data1);

    // verify OMX component sends back an error we expect.
OMX_ERRORTYPE omxError = (OMX_ERRORTYPE)data1;
    if (!isOMXError(omxError)) {
        ALOGW("Invalid OMX error %#x", omxError);
        omxError = OMX_ErrorUndefined;
    }
    mCodec->signalError(omxError);

    return true;
}

// Called when the component has consumed an input buffer (EmptyBufferDone).
// Reclaims ownership, waits out any fence, and either keeps or resubmits the
// buffer depending on the state's input-port mode.
bool ACodec::BaseState::onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd) {
    ALOGV("[%s] onOMXEmptyBufferDone %u",
            mCodec->mComponentName.c_str(), bufferID);

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in EBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        // close the fence even on the error path so the fd doesn't leak
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return false;
    }
    info->mStatus = BufferInfo::OWNED_BY_US;

    // input buffers cannot take fences, so wait for any fence now
    (void)mCodec->waitForFence(fenceFd, "onOMXEmptyBufferDone");
    fenceFd = -1;

    // still save fence for completeness
    info->setWriteFence(fenceFd, "onOMXEmptyBufferDone");

    // We're in "store-metadata-in-buffers" mode, the underlying
    // OMX component had access to data that's implicitly refcounted
    // by this "MediaBuffer" object. Now that the OMX component has
    // told us that it's done with the input buffer, we can decrement
    // the mediaBuffer's reference count.
    info->mData->setMediaBufferBase(NULL);

    PortMode mode = getPortMode(kPortIndexInput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
            postFillThisBuffer(info);
            break;

        case FREE_BUFFERS:
        default:
            // NOTE(review): this handles the *input* port, but the message
            // says "output buffers" — looks like a stale log string; confirm
            // before changing, since tests may match on it.
            ALOGE("SHOULD NOT REACH HERE: cannot free empty output buffers");
            return false;
    }

    return true;
}

// Offers an input buffer we own to the upstream client via
// kWhatFillThisBuffer; the client answers with kWhatInputBufferFilled.
// No-op once input EOS has been seen.
void ACodec::BaseState::postFillThisBuffer(BufferInfo *info) {
    if (mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);

    sp<AMessage> notify = mCodec->mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatFillThisBuffer);
    notify->setInt32("buffer-id", info->mBufferID);

    // wipe stale metadata from the previous use of this buffer
    info->mData->meta()->clear();
    notify->setBuffer("buffer", info->mData);

    sp<AMessage> reply = new AMessage(kWhatInputBufferFilled, mCodec);
    reply->setInt32("buffer-id", info->mBufferID);

    notify->setMessage("reply", reply);

    notify->post();

    info->mStatus = BufferInfo::OWNED_BY_UPSTREAM;
}

// Handles the client's reply to kWhatFillThisBuffer: a filled buffer, a
// flush-return (no buffer, err == OK), or an error that implies EOS.
void ACodec::BaseState::onInputBufferFilled(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    sp<ABuffer> buffer;
    int32_t err = OK;
    bool eos = false;
    PortMode mode = getPortMode(kPortIndexInput);

    if (!msg->findBuffer("buffer", &buffer)) {
        /* these are unfilled buffers returned by client */
        CHECK(msg->findInt32("err", &err));

        if (err == OK) {
            /* buffers with no errors are returned on MediaCodec.flush */
            mode = KEEP_BUFFERS;
        } else {
            ALOGV("[%s] saw error %d instead of an input buffer",
                    mCodec->mComponentName.c_str(), err);
            eos = true;
        }

        buffer.clear();
    }

    int32_t tmp;
    if (buffer != NULL && buffer->meta()->findInt32("eos", &tmp) && tmp) {
        eos = true;
err = ERROR_END_OF_STREAM;
    }

    BufferInfo *info = mCodec->findBufferByID(kPortIndexInput, bufferID);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_UPSTREAM) {
        ALOGE("Wrong ownership in IBF: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexInput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    info->mStatus = BufferInfo::OWNED_BY_US;

    switch (mode) {
        case KEEP_BUFFERS:
        {
            // not streaming: just remember EOS, keep the buffer with us
            if (eos) {
                if (!mCodec->mPortEOS[kPortIndexInput]) {
                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            }
            break;
        }

        case RESUBMIT_BUFFERS:
        {
            if (buffer != NULL && !mCodec->mPortEOS[kPortIndexInput]) {
                // Do not send empty input buffer w/o EOS to the component.
                if (buffer->size() == 0 && !eos) {
                    postFillThisBuffer(info);
                    break;
                }

                int64_t timeUs;
                CHECK(buffer->meta()->findInt64("timeUs", &timeUs));

                OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;

                int32_t isCSD;
                if (buffer->meta()->findInt32("csd", &isCSD) && isCSD != 0) {
                    flags |= OMX_BUFFERFLAG_CODECCONFIG;
                }

                if (eos) {
                    flags |= OMX_BUFFERFLAG_EOS;
                }

                // client filled a different buffer than the one the codec
                // reads from — convert/copy it into the codec-side buffer
                if (buffer != info->mCodecData) {
                    ALOGV("[%s] Needs to copy input data for buffer %u. (%p != %p)",
                            mCodec->mComponentName.c_str(),
                            bufferID,
                            buffer.get(), info->mCodecData.get());

                    sp<DataConverter> converter = mCodec->mConverter[kPortIndexInput];
                    if (converter == NULL) {
                        // no format conversion configured: plain copy
                        converter = getCopyConverter();
                    }
                    status_t err = converter->convert(buffer, info->mCodecData);
                    if (err != OK) {
                        mCodec->signalError(OMX_ErrorUndefined, err);
                        return;
                    }
                }

                if (flags & OMX_BUFFERFLAG_CODECCONFIG) {
                    ALOGV("[%s] calling emptyBuffer %u w/ codec specific data",
                            mCodec->mComponentName.c_str(), bufferID);
                } else if (flags & OMX_BUFFERFLAG_EOS) {
                    ALOGV("[%s] calling emptyBuffer %u w/ EOS",
                            mCodec->mComponentName.c_str(), bufferID);
                } else {
#if TRACK_BUFFER_TIMING
                    ALOGI("[%s] calling emptyBuffer %u w/ time %lld us",
                            mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#else
                    ALOGV("[%s] calling emptyBuffer %u w/ time %lld us",
                            mCodec->mComponentName.c_str(), bufferID, (long long)timeUs);
#endif
                }

#if TRACK_BUFFER_TIMING
                ACodec::BufferStats stats;
                stats.mEmptyBufferTimeUs = ALooper::GetNowUs();
                stats.mFillBufferDoneTimeUs = -1ll;
                mCodec->mBufferStats.add(timeUs, stats);
#endif

                if (mCodec->storingMetadataInDecodedBuffers()) {
                    // try to submit an output buffer for each input buffer
                    PortMode outputMode = getPortMode(kPortIndexOutput);

                    ALOGV("MetadataBuffersToSubmit=%u portMode=%s",
                            mCodec->mMetadataBuffersToSubmit,
                            (outputMode == FREE_BUFFERS ? "FREE" :
                             outputMode == KEEP_BUFFERS ? "KEEP" : "RESUBMIT"));
                    if (outputMode == RESUBMIT_BUFFERS) {
                        mCodec->submitOutputMetadataBuffer();
                    }
                }
                info->checkReadFence("onInputBufferFilled");
                // hand the buffer (and its fence) to the component
                status_t err2 = mCodec->mOMX->emptyBuffer(
                    mCodec->mNode,
                    bufferID,
                    0,
                    info->mCodecData->size(),
                    flags,
                    timeUs,
                    info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                if (!eos && err == OK) {
                    getMoreInputDataIfPossible();
                } else {
                    ALOGV("[%s] Signalled EOS (%d) on the input port",
                            mCodec->mComponentName.c_str(), err);

                    mCodec->mPortEOS[kPortIndexInput] = true;
                    mCodec->mInputEOSResult = err;
                }
            } else if (!mCodec->mPortEOS[kPortIndexInput]) {
                // no buffer from the client: forward EOS with an empty buffer
                if (err != OK && err != ERROR_END_OF_STREAM) {
                    ALOGV("[%s] Signalling EOS on the input port due to error %d",
                            mCodec->mComponentName.c_str(), err);
                } else {
                    ALOGV("[%s] Signalling EOS on the input port",
                            mCodec->mComponentName.c_str());
                }

                ALOGV("[%s] calling emptyBuffer %u signalling EOS",
                        mCodec->mComponentName.c_str(), bufferID);

                info->checkReadFence("onInputBufferFilled");
                status_t err2 = mCodec->mOMX->emptyBuffer(
                        mCodec->mNode,
                        bufferID,
                        0,
                        0,
                        OMX_BUFFERFLAG_EOS,
                        0,
                        info->mFenceFd);
                info->mFenceFd = -1;
                if (err2 != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err2));
                    return;
                }
                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;

                mCodec->mPortEOS[kPortIndexInput] = true;
                mCodec->mInputEOSResult = err;
            }
            break;
        }

        case FREE_BUFFERS:
            break;

        default:
            ALOGE("invalid port mode: %d", mode);
            break;
    }
}

// Scans input buffers we own and re-offers one to the client so the
// pipeline keeps getting fed.
void ACodec::BaseState::getMoreInputDataIfPossible() {
    if
(mCodec->mPortEOS[kPortIndexInput]) {
        return;
    }

    BufferInfo *eligible = NULL;

    for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) {
        BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i);

#if 0
        if (info->mStatus == BufferInfo::OWNED_BY_UPSTREAM) {
            // There's already a "read" pending.
            return;
        }
#endif

        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            // keep scanning; the last buffer we own wins
            eligible = info;
        }
    }

    if (eligible == NULL) {
        return;
    }

    postFillThisBuffer(eligible);
}

// Called when the component has produced output (FillBufferDone).
// Reclaims ownership of the buffer and, per the state's output-port mode,
// either resubmits empty non-EOS buffers to the component or forwards the
// data downstream (kWhatDrainThisBuffer), converting / attaching metadata
// as needed.
bool ACodec::BaseState::onOMXFillBufferDone(
        IOMX::buffer_id bufferID,
        size_t rangeOffset, size_t rangeLength,
        OMX_U32 flags,
        int64_t timeUs,
        int fenceFd) {
    ALOGV("[%s] onOMXFillBufferDone %u time %" PRId64 " us, flags = 0x%08x",
            mCodec->mComponentName.c_str(), bufferID, timeUs, flags);

    ssize_t index;
    status_t err= OK;

#if TRACK_BUFFER_TIMING
    index = mCodec->mBufferStats.indexOfKey(timeUs);
    if (index >= 0) {
        ACodec::BufferStats *stats = &mCodec->mBufferStats.editValueAt(index);
        stats->mFillBufferDoneTimeUs = ALooper::GetNowUs();

        ALOGI("frame PTS %lld: %lld",
                timeUs,
                stats->mFillBufferDoneTimeUs - stats->mEmptyBufferTimeUs);

        mCodec->mBufferStats.removeItemsAt(index);
        stats = NULL;
    }
#endif

    BufferInfo *info =
        mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_COMPONENT) {
        ALOGE("Wrong ownership in FBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        // don't leak the fence fd on the error path
        if (fenceFd >= 0) {
            ::close(fenceFd);
        }
        return true;
    }

    info->mDequeuedAt = ++mCodec->mDequeueCounter;
    info->mStatus = BufferInfo::OWNED_BY_US;

    if (info->mRenderInfo != NULL) {
        // The fence for an emptied buffer must have signaled, but there still could be queued
        // or out-of-order dequeued buffers in the render queue prior to this buffer. Drop these,
        // as we will soon requeue this buffer to the surface. While in theory we could still keep
        // track of buffers that are requeued to the surface, it is better to add support to the
        // buffer-queue to notify us of released buffers and their fences (in the future).
        mCodec->notifyOfRenderedFrames(true /* dropIncomplete */);
    }

    // byte buffers cannot take fences, so wait for any fence now
    if (mCodec->mNativeWindow == NULL) {
        (void)mCodec->waitForFence(fenceFd, "onOMXFillBufferDone");
        fenceFd = -1;
    }
    info->setReadFence(fenceFd, "onOMXFillBufferDone");

    PortMode mode = getPortMode(kPortIndexOutput);

    switch (mode) {
        case KEEP_BUFFERS:
            break;

        case RESUBMIT_BUFFERS:
        {
            // empty buffer without (new) EOS: give it straight back to the
            // component instead of bothering the client
            if (rangeLength == 0 && (!(flags & OMX_BUFFERFLAG_EOS)
                    || mCodec->mPortEOS[kPortIndexOutput])) {
                ALOGV("[%s] calling fillBuffer %u",
                        mCodec->mComponentName.c_str(), info->mBufferID);

                err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd);
                info->mFenceFd = -1;
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }

                info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
                break;
            }

            sp<AMessage> reply =
                new AMessage(kWhatOutputBufferDrained, mCodec);

            if (mCodec->mOutputFormat != mCodec->mLastOutputFormat && rangeLength > 0) {
                // pretend that output format has changed on the first frame (we used to do this)
                if (mCodec->mBaseOutputFormat == mCodec->mOutputFormat) {
                    mCodec->onOutputFormatChanged(mCodec->mOutputFormat);
                }
                mCodec->addKeyFormatChangesToRenderBufferNotification(reply);
                mCodec->sendFormatChange();
            }

            if (mCodec->usingMetadataOnEncoderOutput()) {
                // the buffer holds metadata (gralloc or ANW), not pixel data;
                // extract the native handle so downstream can use it
                native_handle_t *handle = NULL;
                VideoGrallocMetadata &grallocMeta = *(VideoGrallocMetadata *)info->mData->data();
                VideoNativeMetadata &nativeMeta = *(VideoNativeMetadata *)info->mData->data();
                if (info->mData->size() >= sizeof(grallocMeta)
                        && grallocMeta.eType == kMetadataBufferTypeGrallocSource) {
                    handle = (native_handle_t *)(uintptr_t)grallocMeta.pHandle;
                } else if (info->mData->size() >= sizeof(nativeMeta)
                        && nativeMeta.eType == kMetadataBufferTypeANWBuffer) {
#ifdef OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
                    // ANativeWindowBuffer is only valid on 32-bit/mediaserver process
                    handle = NULL;
#else
                    handle = (native_handle_t *)nativeMeta.pBuffer->handle;
#endif
                }
                info->mData->meta()->setPointer("handle", handle);
                info->mData->meta()->setInt32("rangeOffset", rangeOffset);
                info->mData->meta()->setInt32("rangeLength", rangeLength);
            } else if (info->mData == info->mCodecData) {
                info->mData->setRange(rangeOffset, rangeLength);
            } else {
                info->mCodecData->setRange(rangeOffset, rangeLength);
                // in this case we know that mConverter is not null
                status_t err = mCodec->mConverter[kPortIndexOutput]->convert(
                        info->mCodecData, info->mData);
                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                    return true;
                }
            }
#if 0
            if (mCodec->mNativeWindow == NULL) {
                if (IsIDR(info->mData)) {
                    ALOGI("IDR frame");
                }
            }
#endif

            if (mCodec->mSkipCutBuffer != NULL) {
                // trim encoder delay/padding from raw audio output
                mCodec->mSkipCutBuffer->submit(info->mData);
            }
            info->mData->meta()->setInt64("timeUs", timeUs);

            sp<AMessage> notify = mCodec->mNotify->dup();
            notify->setInt32("what", CodecBase::kWhatDrainThisBuffer);
            notify->setInt32("buffer-id", info->mBufferID);
            notify->setBuffer("buffer",
info->mData);
            notify->setInt32("flags", flags);

            reply->setInt32("buffer-id", info->mBufferID);

            notify->setMessage("reply", reply);

            notify->post();

            info->mStatus = BufferInfo::OWNED_BY_DOWNSTREAM;

            if (flags & OMX_BUFFERFLAG_EOS) {
                ALOGV("[%s] saw output EOS", mCodec->mComponentName.c_str());

                sp<AMessage> notify = mCodec->mNotify->dup();
                notify->setInt32("what", CodecBase::kWhatEOS);
                // report the status that accompanied the input-side EOS
                notify->setInt32("err", mCodec->mInputEOSResult);
                notify->post();

                mCodec->mPortEOS[kPortIndexOutput] = true;
            }
            break;
        }

        case FREE_BUFFERS:
            err = mCodec->freeBuffer(kPortIndexOutput, index);
            if (err != OK) {
                mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));
                return true;
            }
            break;

        default:
            ALOGE("Invalid port mode: %d", mode);
            return false;
    }

    return true;
}

// Client has finished with an output buffer (kWhatOutputBufferDrained reply):
// apply any crop/dataspace updates to the native window and, if requested,
// queue the buffer for rendering.
void ACodec::BaseState::onOutputBufferDrained(const sp<AMessage> &msg) {
    IOMX::buffer_id bufferID;
    CHECK(msg->findInt32("buffer-id", (int32_t*)&bufferID));
    ssize_t index;
    BufferInfo *info = mCodec->findBufferByID(kPortIndexOutput, bufferID, &index);
    BufferInfo::Status status = BufferInfo::getSafeStatus(info);
    if (status != BufferInfo::OWNED_BY_DOWNSTREAM) {
        ALOGE("Wrong ownership in OBD: %s(%d) buffer #%u", _asString(status), status, bufferID);
        mCodec->dumpBuffers(kPortIndexOutput);
        mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
        return;
    }

    android_native_rect_t crop;
    if (msg->findRect("crop", &crop.left, &crop.top, &crop.right, &crop.bottom)) {
        status_t err = native_window_set_crop(mCodec->mNativeWindow.get(), &crop);
        ALOGW_IF(err != NO_ERROR, "failed to set crop: %d", err);
    }

    int32_t dataSpace;
    if (msg->findInt32("dataspace", &dataSpace)) {
        status_t err = native_window_set_buffers_data_space(
mCodec->mNativeWindow.get(), (android_dataspace)dataSpace); 6033 ALOGW_IF(err != NO_ERROR, "failed to set dataspace: %d", err); 6034 } 6035 6036 int32_t render; 6037 if (mCodec->mNativeWindow != NULL 6038 && msg->findInt32("render", &render) && render != 0 6039 && info->mData != NULL && info->mData->size() != 0) { 6040 ATRACE_NAME("render"); 6041 // The client wants this buffer to be rendered. 6042 6043 // save buffers sent to the surface so we can get render time when they return 6044 int64_t mediaTimeUs = -1; 6045 info->mData->meta()->findInt64("timeUs", &mediaTimeUs); 6046 if (mediaTimeUs >= 0) { 6047 mCodec->mRenderTracker.onFrameQueued( 6048 mediaTimeUs, info->mGraphicBuffer, new Fence(::dup(info->mFenceFd))); 6049 } 6050 6051 int64_t timestampNs = 0; 6052 if (!msg->findInt64("timestampNs", ×tampNs)) { 6053 // use media timestamp if client did not request a specific render timestamp 6054 if (info->mData->meta()->findInt64("timeUs", ×tampNs)) { 6055 ALOGV("using buffer PTS of %lld", (long long)timestampNs); 6056 timestampNs *= 1000; 6057 } 6058 } 6059 6060 status_t err; 6061 err = native_window_set_buffers_timestamp(mCodec->mNativeWindow.get(), timestampNs); 6062 ALOGW_IF(err != NO_ERROR, "failed to set buffer timestamp: %d", err); 6063 6064 info->checkReadFence("onOutputBufferDrained before queueBuffer"); 6065 err = mCodec->mNativeWindow->queueBuffer( 6066 mCodec->mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd); 6067 info->mFenceFd = -1; 6068 if (err == OK) { 6069 info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW; 6070 } else { 6071 ALOGE("queueBuffer failed in onOutputBufferDrained: %d", err); 6072 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6073 info->mStatus = BufferInfo::OWNED_BY_US; 6074 // keeping read fence as write fence to avoid clobbering 6075 info->mIsReadFence = false; 6076 } 6077 } else { 6078 if (mCodec->mNativeWindow != NULL && 6079 (info->mData == NULL || info->mData->size() != 0)) { 6080 // move read 
fence into write fence to avoid clobbering 6081 info->mIsReadFence = false; 6082 ATRACE_NAME("frame-drop"); 6083 } 6084 info->mStatus = BufferInfo::OWNED_BY_US; 6085 } 6086 6087 PortMode mode = getPortMode(kPortIndexOutput); 6088 6089 switch (mode) { 6090 case KEEP_BUFFERS: 6091 { 6092 // XXX fishy, revisit!!! What about the FREE_BUFFERS case below? 6093 6094 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6095 // We cannot resubmit the buffer we just rendered, dequeue 6096 // the spare instead. 6097 6098 info = mCodec->dequeueBufferFromNativeWindow(); 6099 } 6100 break; 6101 } 6102 6103 case RESUBMIT_BUFFERS: 6104 { 6105 if (!mCodec->mPortEOS[kPortIndexOutput]) { 6106 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6107 // We cannot resubmit the buffer we just rendered, dequeue 6108 // the spare instead. 6109 6110 info = mCodec->dequeueBufferFromNativeWindow(); 6111 } 6112 6113 if (info != NULL) { 6114 ALOGV("[%s] calling fillBuffer %u", 6115 mCodec->mComponentName.c_str(), info->mBufferID); 6116 info->checkWriteFence("onOutputBufferDrained::RESUBMIT_BUFFERS"); 6117 status_t err = mCodec->mOMX->fillBuffer( 6118 mCodec->mNode, info->mBufferID, info->mFenceFd); 6119 info->mFenceFd = -1; 6120 if (err == OK) { 6121 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6122 } else { 6123 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6124 } 6125 } 6126 } 6127 break; 6128 } 6129 6130 case FREE_BUFFERS: 6131 { 6132 status_t err = mCodec->freeBuffer(kPortIndexOutput, index); 6133 if (err != OK) { 6134 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6135 } 6136 break; 6137 } 6138 6139 default: 6140 ALOGE("Invalid port mode: %d", mode); 6141 return; 6142 } 6143} 6144 6145//////////////////////////////////////////////////////////////////////////////// 6146 6147ACodec::UninitializedState::UninitializedState(ACodec *codec) 6148 : BaseState(codec) { 6149} 6150 6151void ACodec::UninitializedState::stateEntered() { 6152 
ALOGV("Now uninitialized"); 6153 6154 if (mDeathNotifier != NULL) { 6155 mCodec->mNodeBinder->unlinkToDeath(mDeathNotifier); 6156 mDeathNotifier.clear(); 6157 } 6158 6159 mCodec->mUsingNativeWindow = false; 6160 mCodec->mNativeWindow.clear(); 6161 mCodec->mNativeWindowUsageBits = 0; 6162 mCodec->mNode = 0; 6163 mCodec->mOMX.clear(); 6164 mCodec->mQuirks = 0; 6165 mCodec->mFlags = 0; 6166 mCodec->mInputMetadataType = kMetadataBufferTypeInvalid; 6167 mCodec->mOutputMetadataType = kMetadataBufferTypeInvalid; 6168 mCodec->mConverter[0].clear(); 6169 mCodec->mConverter[1].clear(); 6170 mCodec->mComponentName.clear(); 6171} 6172 6173bool ACodec::UninitializedState::onMessageReceived(const sp<AMessage> &msg) { 6174 bool handled = false; 6175 6176 switch (msg->what()) { 6177 case ACodec::kWhatSetup: 6178 { 6179 onSetup(msg); 6180 6181 handled = true; 6182 break; 6183 } 6184 6185 case ACodec::kWhatAllocateComponent: 6186 { 6187 onAllocateComponent(msg); 6188 handled = true; 6189 break; 6190 } 6191 6192 case ACodec::kWhatShutdown: 6193 { 6194 int32_t keepComponentAllocated; 6195 CHECK(msg->findInt32( 6196 "keepComponentAllocated", &keepComponentAllocated)); 6197 ALOGW_IF(keepComponentAllocated, 6198 "cannot keep component allocated on shutdown in Uninitialized state"); 6199 6200 sp<AMessage> notify = mCodec->mNotify->dup(); 6201 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6202 notify->post(); 6203 6204 handled = true; 6205 break; 6206 } 6207 6208 case ACodec::kWhatFlush: 6209 { 6210 sp<AMessage> notify = mCodec->mNotify->dup(); 6211 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6212 notify->post(); 6213 6214 handled = true; 6215 break; 6216 } 6217 6218 case ACodec::kWhatReleaseCodecInstance: 6219 { 6220 // nothing to do, as we have already signaled shutdown 6221 handled = true; 6222 break; 6223 } 6224 6225 default: 6226 return BaseState::onMessageReceived(msg); 6227 } 6228 6229 return handled; 6230} 6231 6232void 
ACodec::UninitializedState::onSetup( 6233 const sp<AMessage> &msg) { 6234 if (onAllocateComponent(msg) 6235 && mCodec->mLoadedState->onConfigureComponent(msg)) { 6236 mCodec->mLoadedState->onStart(); 6237 } 6238} 6239 6240bool ACodec::UninitializedState::onAllocateComponent(const sp<AMessage> &msg) { 6241 ALOGV("onAllocateComponent"); 6242 6243 CHECK(mCodec->mNode == 0); 6244 6245 OMXClient client; 6246 if (client.connect() != OK) { 6247 mCodec->signalError(OMX_ErrorUndefined, NO_INIT); 6248 return false; 6249 } 6250 6251 sp<IOMX> omx = client.interface(); 6252 6253 sp<AMessage> notify = new AMessage(kWhatOMXDied, mCodec); 6254 6255 Vector<AString> matchingCodecs; 6256 6257 AString mime; 6258 6259 AString componentName; 6260 uint32_t quirks = 0; 6261 int32_t encoder = false; 6262 if (msg->findString("componentName", &componentName)) { 6263 sp<IMediaCodecList> list = MediaCodecList::getInstance(); 6264 if (list != NULL && list->findCodecByName(componentName.c_str()) >= 0) { 6265 matchingCodecs.add(componentName); 6266 } 6267 } else { 6268 CHECK(msg->findString("mime", &mime)); 6269 6270 if (!msg->findInt32("encoder", &encoder)) { 6271 encoder = false; 6272 } 6273 6274 MediaCodecList::findMatchingCodecs( 6275 mime.c_str(), 6276 encoder, // createEncoder 6277 0, // flags 6278 &matchingCodecs); 6279 } 6280 6281 sp<CodecObserver> observer = new CodecObserver; 6282 IOMX::node_id node = 0; 6283 6284 status_t err = NAME_NOT_FOUND; 6285 for (size_t matchIndex = 0; matchIndex < matchingCodecs.size(); 6286 ++matchIndex) { 6287 componentName = matchingCodecs[matchIndex]; 6288 quirks = MediaCodecList::getQuirksFor(componentName.c_str()); 6289 6290 pid_t tid = gettid(); 6291 int prevPriority = androidGetThreadPriority(tid); 6292 androidSetThreadPriority(tid, ANDROID_PRIORITY_FOREGROUND); 6293 err = omx->allocateNode(componentName.c_str(), observer, &mCodec->mNodeBinder, &node); 6294 androidSetThreadPriority(tid, prevPriority); 6295 6296 if (err == OK) { 6297 break; 6298 } else { 
6299 ALOGW("Allocating component '%s' failed, try next one.", componentName.c_str()); 6300 } 6301 6302 node = 0; 6303 } 6304 6305 if (node == 0) { 6306 if (!mime.empty()) { 6307 ALOGE("Unable to instantiate a %scoder for type '%s' with err %#x.", 6308 encoder ? "en" : "de", mime.c_str(), err); 6309 } else { 6310 ALOGE("Unable to instantiate codec '%s' with err %#x.", componentName.c_str(), err); 6311 } 6312 6313 mCodec->signalError((OMX_ERRORTYPE)err, makeNoSideEffectStatus(err)); 6314 return false; 6315 } 6316 6317 mDeathNotifier = new DeathNotifier(notify); 6318 if (mCodec->mNodeBinder == NULL || 6319 mCodec->mNodeBinder->linkToDeath(mDeathNotifier) != OK) { 6320 // This was a local binder, if it dies so do we, we won't care 6321 // about any notifications in the afterlife. 6322 mDeathNotifier.clear(); 6323 } 6324 6325 notify = new AMessage(kWhatOMXMessageList, mCodec); 6326 observer->setNotificationMessage(notify); 6327 6328 mCodec->mComponentName = componentName; 6329 mCodec->mRenderTracker.setComponentName(componentName); 6330 mCodec->mFlags = 0; 6331 6332 if (componentName.endsWith(".secure")) { 6333 mCodec->mFlags |= kFlagIsSecure; 6334 mCodec->mFlags |= kFlagIsGrallocUsageProtected; 6335 mCodec->mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown; 6336 } 6337 6338 mCodec->mQuirks = quirks; 6339 mCodec->mOMX = omx; 6340 mCodec->mNode = node; 6341 6342 { 6343 sp<AMessage> notify = mCodec->mNotify->dup(); 6344 notify->setInt32("what", CodecBase::kWhatComponentAllocated); 6345 notify->setString("componentName", mCodec->mComponentName.c_str()); 6346 notify->post(); 6347 } 6348 6349 mCodec->changeState(mCodec->mLoadedState); 6350 6351 return true; 6352} 6353 6354//////////////////////////////////////////////////////////////////////////////// 6355 6356ACodec::LoadedState::LoadedState(ACodec *codec) 6357 : BaseState(codec) { 6358} 6359 6360void ACodec::LoadedState::stateEntered() { 6361 ALOGV("[%s] Now Loaded", mCodec->mComponentName.c_str()); 6362 6363 
mCodec->mPortEOS[kPortIndexInput] = 6364 mCodec->mPortEOS[kPortIndexOutput] = false; 6365 6366 mCodec->mInputEOSResult = OK; 6367 6368 mCodec->mDequeueCounter = 0; 6369 mCodec->mMetadataBuffersToSubmit = 0; 6370 mCodec->mRepeatFrameDelayUs = -1ll; 6371 mCodec->mInputFormat.clear(); 6372 mCodec->mOutputFormat.clear(); 6373 mCodec->mBaseOutputFormat.clear(); 6374 6375 if (mCodec->mShutdownInProgress) { 6376 bool keepComponentAllocated = mCodec->mKeepComponentAllocated; 6377 6378 mCodec->mShutdownInProgress = false; 6379 mCodec->mKeepComponentAllocated = false; 6380 6381 onShutdown(keepComponentAllocated); 6382 } 6383 mCodec->mExplicitShutdown = false; 6384 6385 mCodec->processDeferredMessages(); 6386} 6387 6388void ACodec::LoadedState::onShutdown(bool keepComponentAllocated) { 6389 if (!keepComponentAllocated) { 6390 (void)mCodec->mOMX->freeNode(mCodec->mNode); 6391 6392 mCodec->changeState(mCodec->mUninitializedState); 6393 } 6394 6395 if (mCodec->mExplicitShutdown) { 6396 sp<AMessage> notify = mCodec->mNotify->dup(); 6397 notify->setInt32("what", CodecBase::kWhatShutdownCompleted); 6398 notify->post(); 6399 mCodec->mExplicitShutdown = false; 6400 } 6401} 6402 6403bool ACodec::LoadedState::onMessageReceived(const sp<AMessage> &msg) { 6404 bool handled = false; 6405 6406 switch (msg->what()) { 6407 case ACodec::kWhatConfigureComponent: 6408 { 6409 onConfigureComponent(msg); 6410 handled = true; 6411 break; 6412 } 6413 6414 case ACodec::kWhatCreateInputSurface: 6415 { 6416 onCreateInputSurface(msg); 6417 handled = true; 6418 break; 6419 } 6420 6421 case ACodec::kWhatSetInputSurface: 6422 { 6423 onSetInputSurface(msg); 6424 handled = true; 6425 break; 6426 } 6427 6428 case ACodec::kWhatStart: 6429 { 6430 onStart(); 6431 handled = true; 6432 break; 6433 } 6434 6435 case ACodec::kWhatShutdown: 6436 { 6437 int32_t keepComponentAllocated; 6438 CHECK(msg->findInt32( 6439 "keepComponentAllocated", &keepComponentAllocated)); 6440 6441 mCodec->mExplicitShutdown = true; 6442 
onShutdown(keepComponentAllocated); 6443 6444 handled = true; 6445 break; 6446 } 6447 6448 case ACodec::kWhatFlush: 6449 { 6450 sp<AMessage> notify = mCodec->mNotify->dup(); 6451 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6452 notify->post(); 6453 6454 handled = true; 6455 break; 6456 } 6457 6458 default: 6459 return BaseState::onMessageReceived(msg); 6460 } 6461 6462 return handled; 6463} 6464 6465bool ACodec::LoadedState::onConfigureComponent( 6466 const sp<AMessage> &msg) { 6467 ALOGV("onConfigureComponent"); 6468 6469 CHECK(mCodec->mNode != 0); 6470 6471 status_t err = OK; 6472 AString mime; 6473 if (!msg->findString("mime", &mime)) { 6474 err = BAD_VALUE; 6475 } else { 6476 err = mCodec->configureCodec(mime.c_str(), msg); 6477 } 6478 if (err != OK) { 6479 ALOGE("[%s] configureCodec returning error %d", 6480 mCodec->mComponentName.c_str(), err); 6481 6482 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6483 return false; 6484 } 6485 6486 { 6487 sp<AMessage> notify = mCodec->mNotify->dup(); 6488 notify->setInt32("what", CodecBase::kWhatComponentConfigured); 6489 notify->setMessage("input-format", mCodec->mInputFormat); 6490 notify->setMessage("output-format", mCodec->mOutputFormat); 6491 notify->post(); 6492 } 6493 6494 return true; 6495} 6496 6497status_t ACodec::LoadedState::setupInputSurface() { 6498 status_t err = OK; 6499 6500 if (mCodec->mRepeatFrameDelayUs > 0ll) { 6501 err = mCodec->mOMX->setInternalOption( 6502 mCodec->mNode, 6503 kPortIndexInput, 6504 IOMX::INTERNAL_OPTION_REPEAT_PREVIOUS_FRAME_DELAY, 6505 &mCodec->mRepeatFrameDelayUs, 6506 sizeof(mCodec->mRepeatFrameDelayUs)); 6507 6508 if (err != OK) { 6509 ALOGE("[%s] Unable to configure option to repeat previous " 6510 "frames (err %d)", 6511 mCodec->mComponentName.c_str(), 6512 err); 6513 return err; 6514 } 6515 } 6516 6517 if (mCodec->mMaxPtsGapUs > 0ll) { 6518 err = mCodec->mOMX->setInternalOption( 6519 mCodec->mNode, 6520 kPortIndexInput, 6521 
IOMX::INTERNAL_OPTION_MAX_TIMESTAMP_GAP, 6522 &mCodec->mMaxPtsGapUs, 6523 sizeof(mCodec->mMaxPtsGapUs)); 6524 6525 if (err != OK) { 6526 ALOGE("[%s] Unable to configure max timestamp gap (err %d)", 6527 mCodec->mComponentName.c_str(), 6528 err); 6529 return err; 6530 } 6531 } 6532 6533 if (mCodec->mMaxFps > 0) { 6534 err = mCodec->mOMX->setInternalOption( 6535 mCodec->mNode, 6536 kPortIndexInput, 6537 IOMX::INTERNAL_OPTION_MAX_FPS, 6538 &mCodec->mMaxFps, 6539 sizeof(mCodec->mMaxFps)); 6540 6541 if (err != OK) { 6542 ALOGE("[%s] Unable to configure max fps (err %d)", 6543 mCodec->mComponentName.c_str(), 6544 err); 6545 return err; 6546 } 6547 } 6548 6549 if (mCodec->mTimePerCaptureUs > 0ll 6550 && mCodec->mTimePerFrameUs > 0ll) { 6551 int64_t timeLapse[2]; 6552 timeLapse[0] = mCodec->mTimePerFrameUs; 6553 timeLapse[1] = mCodec->mTimePerCaptureUs; 6554 err = mCodec->mOMX->setInternalOption( 6555 mCodec->mNode, 6556 kPortIndexInput, 6557 IOMX::INTERNAL_OPTION_TIME_LAPSE, 6558 &timeLapse[0], 6559 sizeof(timeLapse)); 6560 6561 if (err != OK) { 6562 ALOGE("[%s] Unable to configure time lapse (err %d)", 6563 mCodec->mComponentName.c_str(), 6564 err); 6565 return err; 6566 } 6567 } 6568 6569 if (mCodec->mCreateInputBuffersSuspended) { 6570 bool suspend = true; 6571 err = mCodec->mOMX->setInternalOption( 6572 mCodec->mNode, 6573 kPortIndexInput, 6574 IOMX::INTERNAL_OPTION_SUSPEND, 6575 &suspend, 6576 sizeof(suspend)); 6577 6578 if (err != OK) { 6579 ALOGE("[%s] Unable to configure option to suspend (err %d)", 6580 mCodec->mComponentName.c_str(), 6581 err); 6582 return err; 6583 } 6584 } 6585 6586 uint32_t usageBits; 6587 if (mCodec->mOMX->getParameter( 6588 mCodec->mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits, 6589 &usageBits, sizeof(usageBits)) == OK) { 6590 mCodec->mInputFormat->setInt32( 6591 "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN)); 6592 } 6593 6594 sp<ABuffer> colorAspectsBuffer; 6595 if 
(mCodec->mInputFormat->findBuffer("android._color-aspects", &colorAspectsBuffer)) { 6596 err = mCodec->mOMX->setInternalOption( 6597 mCodec->mNode, kPortIndexInput, IOMX::INTERNAL_OPTION_COLOR_ASPECTS, 6598 colorAspectsBuffer->base(), colorAspectsBuffer->capacity()); 6599 if (err != OK) { 6600 ALOGE("[%s] Unable to configure color aspects (err %d)", 6601 mCodec->mComponentName.c_str(), err); 6602 return err; 6603 } 6604 } 6605 return OK; 6606} 6607 6608void ACodec::LoadedState::onCreateInputSurface( 6609 const sp<AMessage> & /* msg */) { 6610 ALOGV("onCreateInputSurface"); 6611 6612 sp<AMessage> notify = mCodec->mNotify->dup(); 6613 notify->setInt32("what", CodecBase::kWhatInputSurfaceCreated); 6614 6615 android_dataspace dataSpace; 6616 status_t err = 6617 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6618 notify->setMessage("input-format", mCodec->mInputFormat); 6619 notify->setMessage("output-format", mCodec->mOutputFormat); 6620 6621 sp<IGraphicBufferProducer> bufferProducer; 6622 if (err == OK) { 6623 err = mCodec->mOMX->createInputSurface( 6624 mCodec->mNode, kPortIndexInput, dataSpace, &bufferProducer, &mCodec->mInputMetadataType); 6625 } 6626 6627 if (err == OK) { 6628 err = setupInputSurface(); 6629 } 6630 6631 if (err == OK) { 6632 notify->setObject("input-surface", 6633 new BufferProducerWrapper(bufferProducer)); 6634 } else { 6635 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6636 // the error through because it's in the "configured" state. We 6637 // send a kWhatInputSurfaceCreated with an error value instead. 
6638 ALOGE("[%s] onCreateInputSurface returning error %d", 6639 mCodec->mComponentName.c_str(), err); 6640 notify->setInt32("err", err); 6641 } 6642 notify->post(); 6643} 6644 6645void ACodec::LoadedState::onSetInputSurface( 6646 const sp<AMessage> &msg) { 6647 ALOGV("onSetInputSurface"); 6648 6649 sp<AMessage> notify = mCodec->mNotify->dup(); 6650 notify->setInt32("what", CodecBase::kWhatInputSurfaceAccepted); 6651 6652 sp<RefBase> obj; 6653 CHECK(msg->findObject("input-surface", &obj)); 6654 sp<PersistentSurface> surface = static_cast<PersistentSurface *>(obj.get()); 6655 6656 android_dataspace dataSpace; 6657 status_t err = 6658 mCodec->setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(&dataSpace); 6659 notify->setMessage("input-format", mCodec->mInputFormat); 6660 notify->setMessage("output-format", mCodec->mOutputFormat); 6661 6662 if (err == OK) { 6663 err = mCodec->mOMX->setInputSurface( 6664 mCodec->mNode, kPortIndexInput, surface->getBufferConsumer(), 6665 &mCodec->mInputMetadataType); 6666 } 6667 6668 if (err == OK) { 6669 surface->getBufferConsumer()->setDefaultBufferDataSpace(dataSpace); 6670 err = setupInputSurface(); 6671 } 6672 6673 if (err != OK) { 6674 // Can't use mCodec->signalError() here -- MediaCodec won't forward 6675 // the error through because it's in the "configured" state. We 6676 // send a kWhatInputSurfaceAccepted with an error value instead. 
6677 ALOGE("[%s] onSetInputSurface returning error %d", 6678 mCodec->mComponentName.c_str(), err); 6679 notify->setInt32("err", err); 6680 } 6681 notify->post(); 6682} 6683 6684void ACodec::LoadedState::onStart() { 6685 ALOGV("onStart"); 6686 6687 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 6688 if (err != OK) { 6689 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6690 } else { 6691 mCodec->changeState(mCodec->mLoadedToIdleState); 6692 } 6693} 6694 6695//////////////////////////////////////////////////////////////////////////////// 6696 6697ACodec::LoadedToIdleState::LoadedToIdleState(ACodec *codec) 6698 : BaseState(codec) { 6699} 6700 6701void ACodec::LoadedToIdleState::stateEntered() { 6702 ALOGV("[%s] Now Loaded->Idle", mCodec->mComponentName.c_str()); 6703 6704 status_t err; 6705 if ((err = allocateBuffers()) != OK) { 6706 ALOGE("Failed to allocate buffers after transitioning to IDLE state " 6707 "(error 0x%08x)", 6708 err); 6709 6710 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6711 6712 mCodec->mOMX->sendCommand( 6713 mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded); 6714 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexInput)) { 6715 mCodec->freeBuffersOnPort(kPortIndexInput); 6716 } 6717 if (mCodec->allYourBuffersAreBelongToUs(kPortIndexOutput)) { 6718 mCodec->freeBuffersOnPort(kPortIndexOutput); 6719 } 6720 6721 mCodec->changeState(mCodec->mLoadedState); 6722 } 6723} 6724 6725status_t ACodec::LoadedToIdleState::allocateBuffers() { 6726 status_t err = mCodec->allocateBuffersOnPort(kPortIndexInput); 6727 6728 if (err != OK) { 6729 return err; 6730 } 6731 6732 return mCodec->allocateBuffersOnPort(kPortIndexOutput); 6733} 6734 6735bool ACodec::LoadedToIdleState::onMessageReceived(const sp<AMessage> &msg) { 6736 switch (msg->what()) { 6737 case kWhatSetParameters: 6738 case kWhatShutdown: 6739 { 6740 mCodec->deferMessage(msg); 6741 return true; 6742 } 6743 6744 
case kWhatSignalEndOfInputStream: 6745 { 6746 mCodec->onSignalEndOfInputStream(); 6747 return true; 6748 } 6749 6750 case kWhatResume: 6751 { 6752 // We'll be active soon enough. 6753 return true; 6754 } 6755 6756 case kWhatFlush: 6757 { 6758 // We haven't even started yet, so we're flushed alright... 6759 sp<AMessage> notify = mCodec->mNotify->dup(); 6760 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6761 notify->post(); 6762 return true; 6763 } 6764 6765 default: 6766 return BaseState::onMessageReceived(msg); 6767 } 6768} 6769 6770bool ACodec::LoadedToIdleState::onOMXEvent( 6771 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6772 switch (event) { 6773 case OMX_EventCmdComplete: 6774 { 6775 status_t err = OK; 6776 if (data1 != (OMX_U32)OMX_CommandStateSet 6777 || data2 != (OMX_U32)OMX_StateIdle) { 6778 ALOGE("Unexpected command completion in LoadedToIdleState: %s(%u) %s(%u)", 6779 asString((OMX_COMMANDTYPE)data1), data1, 6780 asString((OMX_STATETYPE)data2), data2); 6781 err = FAILED_TRANSACTION; 6782 } 6783 6784 if (err == OK) { 6785 err = mCodec->mOMX->sendCommand( 6786 mCodec->mNode, OMX_CommandStateSet, OMX_StateExecuting); 6787 } 6788 6789 if (err != OK) { 6790 mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err)); 6791 } else { 6792 mCodec->changeState(mCodec->mIdleToExecutingState); 6793 } 6794 6795 return true; 6796 } 6797 6798 default: 6799 return BaseState::onOMXEvent(event, data1, data2); 6800 } 6801} 6802 6803//////////////////////////////////////////////////////////////////////////////// 6804 6805ACodec::IdleToExecutingState::IdleToExecutingState(ACodec *codec) 6806 : BaseState(codec) { 6807} 6808 6809void ACodec::IdleToExecutingState::stateEntered() { 6810 ALOGV("[%s] Now Idle->Executing", mCodec->mComponentName.c_str()); 6811} 6812 6813bool ACodec::IdleToExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6814 switch (msg->what()) { 6815 case kWhatSetParameters: 6816 case kWhatShutdown: 6817 { 6818 
mCodec->deferMessage(msg); 6819 return true; 6820 } 6821 6822 case kWhatResume: 6823 { 6824 // We'll be active soon enough. 6825 return true; 6826 } 6827 6828 case kWhatFlush: 6829 { 6830 // We haven't even started yet, so we're flushed alright... 6831 sp<AMessage> notify = mCodec->mNotify->dup(); 6832 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 6833 notify->post(); 6834 6835 return true; 6836 } 6837 6838 case kWhatSignalEndOfInputStream: 6839 { 6840 mCodec->onSignalEndOfInputStream(); 6841 return true; 6842 } 6843 6844 default: 6845 return BaseState::onMessageReceived(msg); 6846 } 6847} 6848 6849bool ACodec::IdleToExecutingState::onOMXEvent( 6850 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 6851 switch (event) { 6852 case OMX_EventCmdComplete: 6853 { 6854 if (data1 != (OMX_U32)OMX_CommandStateSet 6855 || data2 != (OMX_U32)OMX_StateExecuting) { 6856 ALOGE("Unexpected command completion in IdleToExecutingState: %s(%u) %s(%u)", 6857 asString((OMX_COMMANDTYPE)data1), data1, 6858 asString((OMX_STATETYPE)data2), data2); 6859 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6860 return true; 6861 } 6862 6863 mCodec->mExecutingState->resume(); 6864 mCodec->changeState(mCodec->mExecutingState); 6865 6866 return true; 6867 } 6868 6869 default: 6870 return BaseState::onOMXEvent(event, data1, data2); 6871 } 6872} 6873 6874//////////////////////////////////////////////////////////////////////////////// 6875 6876ACodec::ExecutingState::ExecutingState(ACodec *codec) 6877 : BaseState(codec), 6878 mActive(false) { 6879} 6880 6881ACodec::BaseState::PortMode ACodec::ExecutingState::getPortMode( 6882 OMX_U32 /* portIndex */) { 6883 return RESUBMIT_BUFFERS; 6884} 6885 6886void ACodec::ExecutingState::submitOutputMetaBuffers() { 6887 // submit as many buffers as there are input buffers with the codec 6888 // in case we are in port reconfiguring 6889 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); ++i) { 6890 BufferInfo *info = 
&mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6891 6892 if (info->mStatus == BufferInfo::OWNED_BY_COMPONENT) { 6893 if (mCodec->submitOutputMetadataBuffer() != OK) 6894 break; 6895 } 6896 } 6897 6898 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 6899 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 6900} 6901 6902void ACodec::ExecutingState::submitRegularOutputBuffers() { 6903 bool failed = false; 6904 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexOutput].size(); ++i) { 6905 BufferInfo *info = &mCodec->mBuffers[kPortIndexOutput].editItemAt(i); 6906 6907 if (mCodec->mNativeWindow != NULL) { 6908 if (info->mStatus != BufferInfo::OWNED_BY_US 6909 && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6910 ALOGE("buffers should be owned by us or the surface"); 6911 failed = true; 6912 break; 6913 } 6914 6915 if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) { 6916 continue; 6917 } 6918 } else { 6919 if (info->mStatus != BufferInfo::OWNED_BY_US) { 6920 ALOGE("buffers should be owned by us"); 6921 failed = true; 6922 break; 6923 } 6924 } 6925 6926 ALOGV("[%s] calling fillBuffer %u", mCodec->mComponentName.c_str(), info->mBufferID); 6927 6928 info->checkWriteFence("submitRegularOutputBuffers"); 6929 status_t err = mCodec->mOMX->fillBuffer(mCodec->mNode, info->mBufferID, info->mFenceFd); 6930 info->mFenceFd = -1; 6931 if (err != OK) { 6932 failed = true; 6933 break; 6934 } 6935 6936 info->mStatus = BufferInfo::OWNED_BY_COMPONENT; 6937 } 6938 6939 if (failed) { 6940 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 6941 } 6942} 6943 6944void ACodec::ExecutingState::submitOutputBuffers() { 6945 submitRegularOutputBuffers(); 6946 if (mCodec->storingMetadataInDecodedBuffers()) { 6947 submitOutputMetaBuffers(); 6948 } 6949} 6950 6951void ACodec::ExecutingState::resume() { 6952 if (mActive) { 6953 ALOGV("[%s] We're already active, no need to resume.", mCodec->mComponentName.c_str()); 6954 return; 6955 } 6956 6957 
submitOutputBuffers(); 6958 6959 // Post all available input buffers 6960 if (mCodec->mBuffers[kPortIndexInput].size() == 0u) { 6961 ALOGW("[%s] we don't have any input buffers to resume", mCodec->mComponentName.c_str()); 6962 } 6963 6964 for (size_t i = 0; i < mCodec->mBuffers[kPortIndexInput].size(); i++) { 6965 BufferInfo *info = &mCodec->mBuffers[kPortIndexInput].editItemAt(i); 6966 if (info->mStatus == BufferInfo::OWNED_BY_US) { 6967 postFillThisBuffer(info); 6968 } 6969 } 6970 6971 mActive = true; 6972} 6973 6974void ACodec::ExecutingState::stateEntered() { 6975 ALOGV("[%s] Now Executing", mCodec->mComponentName.c_str()); 6976 6977 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 6978 mCodec->processDeferredMessages(); 6979} 6980 6981bool ACodec::ExecutingState::onMessageReceived(const sp<AMessage> &msg) { 6982 bool handled = false; 6983 6984 switch (msg->what()) { 6985 case kWhatShutdown: 6986 { 6987 int32_t keepComponentAllocated; 6988 CHECK(msg->findInt32( 6989 "keepComponentAllocated", &keepComponentAllocated)); 6990 6991 mCodec->mShutdownInProgress = true; 6992 mCodec->mExplicitShutdown = true; 6993 mCodec->mKeepComponentAllocated = keepComponentAllocated; 6994 6995 mActive = false; 6996 6997 status_t err = mCodec->mOMX->sendCommand( 6998 mCodec->mNode, OMX_CommandStateSet, OMX_StateIdle); 6999 if (err != OK) { 7000 if (keepComponentAllocated) { 7001 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7002 } 7003 // TODO: do some recovery here. 
7004 } else { 7005 mCodec->changeState(mCodec->mExecutingToIdleState); 7006 } 7007 7008 handled = true; 7009 break; 7010 } 7011 7012 case kWhatFlush: 7013 { 7014 ALOGV("[%s] ExecutingState flushing now " 7015 "(codec owns %zu/%zu input, %zu/%zu output).", 7016 mCodec->mComponentName.c_str(), 7017 mCodec->countBuffersOwnedByComponent(kPortIndexInput), 7018 mCodec->mBuffers[kPortIndexInput].size(), 7019 mCodec->countBuffersOwnedByComponent(kPortIndexOutput), 7020 mCodec->mBuffers[kPortIndexOutput].size()); 7021 7022 mActive = false; 7023 7024 status_t err = mCodec->mOMX->sendCommand(mCodec->mNode, OMX_CommandFlush, OMX_ALL); 7025 if (err != OK) { 7026 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7027 } else { 7028 mCodec->changeState(mCodec->mFlushingState); 7029 } 7030 7031 handled = true; 7032 break; 7033 } 7034 7035 case kWhatResume: 7036 { 7037 resume(); 7038 7039 handled = true; 7040 break; 7041 } 7042 7043 case kWhatRequestIDRFrame: 7044 { 7045 status_t err = mCodec->requestIDRFrame(); 7046 if (err != OK) { 7047 ALOGW("Requesting an IDR frame failed."); 7048 } 7049 7050 handled = true; 7051 break; 7052 } 7053 7054 case kWhatSetParameters: 7055 { 7056 sp<AMessage> params; 7057 CHECK(msg->findMessage("params", ¶ms)); 7058 7059 status_t err = mCodec->setParameters(params); 7060 7061 sp<AMessage> reply; 7062 if (msg->findMessage("reply", &reply)) { 7063 reply->setInt32("err", err); 7064 reply->post(); 7065 } 7066 7067 handled = true; 7068 break; 7069 } 7070 7071 case ACodec::kWhatSignalEndOfInputStream: 7072 { 7073 mCodec->onSignalEndOfInputStream(); 7074 handled = true; 7075 break; 7076 } 7077 7078 // *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED *** 7079 case kWhatSubmitOutputMetadataBufferIfEOS: 7080 { 7081 if (mCodec->mPortEOS[kPortIndexInput] && 7082 !mCodec->mPortEOS[kPortIndexOutput]) { 7083 status_t err = mCodec->submitOutputMetadataBuffer(); 7084 if (err == OK) { 7085 mCodec->signalSubmitOutputMetadataBufferIfEOS_workaround(); 7086 } 
            }
            return true;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

// Applies dynamic ("runtime") parameters to a running codec. Every key is
// optional; unrecognized keys are silently ignored. Returns the first hard
// failure, otherwise OK (note: a failing "intra-refresh-period" is treated
// as non-fatal and does not fail the call).
status_t ACodec::setParameters(const sp<AMessage> &params) {
    // Dynamic video encoder bitrate change (applied to the output port).
    int32_t videoBitrate;
    if (params->findInt32("video-bitrate", &videoBitrate)) {
        OMX_VIDEO_CONFIG_BITRATETYPE configParams;
        InitOMXParams(&configParams);
        configParams.nPortIndex = kPortIndexOutput;
        configParams.nEncodeBitrate = videoBitrate;

        status_t err = mOMX->setConfig(
                mNode,
                OMX_IndexConfigVideoBitrate,
                &configParams,
                sizeof(configParams));

        if (err != OK) {
            ALOGE("setConfig(OMX_IndexConfigVideoBitrate, %d) failed w/ err %d",
                   videoBitrate, err);

            return err;
        }
    }

    // Drop input frames with timestamps earlier than this (surface input
    // start time), forwarded as an internal option on the input port.
    int64_t skipFramesBeforeUs;
    if (params->findInt64("skip-frames-before", &skipFramesBeforeUs)) {
        status_t err =
            mOMX->setInternalOption(
                    mNode,
                    kPortIndexInput,
                    IOMX::INTERNAL_OPTION_START_TIME,
                    &skipFramesBeforeUs,
                    sizeof(skipFramesBeforeUs));

        if (err != OK) {
            ALOGE("Failed to set parameter 'skip-frames-before' (err %d)", err);
            return err;
        }
    }

    // Suspend (non-zero) or resume (zero) consumption of input frames.
    int32_t dropInputFrames;
    if (params->findInt32("drop-input-frames", &dropInputFrames)) {
        bool suspend = dropInputFrames != 0;

        status_t err =
            mOMX->setInternalOption(
                    mNode,
                    kPortIndexInput,
                    IOMX::INTERNAL_OPTION_SUSPEND,
                    &suspend,
                    sizeof(suspend));

        if (err != OK) {
            ALOGE("Failed to set parameter 'drop-input-frames' (err %d)", err);
            return err;
        }
    }

    // Request an IDR (sync) frame; the key's value is irrelevant.
    int32_t dummy;
    if (params->findInt32("request-sync", &dummy)) {
        status_t err = requestIDRFrame();

        if (err != OK) {
            ALOGE("Requesting a sync frame failed w/ err %d", err);
            return err;
        }
    }

    // Operating rate (fps for video, sample rate for audio); only positive
    // values are applied.
    float rate;
    if (params->findFloat("operating-rate", &rate) && rate > 0) {
        status_t err = setOperatingRate(rate, mIsVideo);
        if (err != OK) {
            ALOGE("Failed to set parameter 'operating-rate' (err %d)", err);
            return err;
        }
    }

    // Intra-refresh period: best-effort only — failure is logged and ignored.
    int32_t intraRefreshPeriod = 0;
    if (params->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod > 0) {
        status_t err = setIntraRefreshPeriod(intraRefreshPeriod, false);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    return OK;
}

// Signals end-of-input-stream to the component (surface input) and notifies
// the client; on failure the "err" field is attached to the notification.
void ACodec::onSignalEndOfInputStream() {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", CodecBase::kWhatSignaledInputEOS);

    status_t err = mOMX->signalEndOfInputStream(mNode);
    if (err != OK) {
        notify->setInt32("err", err);
    }
    notify->post();
}

// Forwards frame-rendered events to the codec's render tracker / client.
bool ACodec::ExecutingState::onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Handles OMX events while executing. A port-settings change on the output
// port either triggers a full output-port reallocation (port definition
// changed) or is informational (e.g. crop / intra-refresh config changes).
bool ACodec::ExecutingState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventPortSettingsChanged:
        {
            CHECK_EQ(data1, (OMX_U32)kPortIndexOutput);

            mCodec->onOutputFormatChanged();

            if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
                // Buffer geometry changed: disable the output port, free
                // buffers we still own, and transition to the
                // port-settings-changed state to reallocate.
                mCodec->mMetadataBuffersToSubmit = 0;
                CHECK_EQ(mCodec->mOMX->sendCommand(
                            mCodec->mNode,
                            OMX_CommandPortDisable, kPortIndexOutput),
                         (status_t)OK);

                mCodec->freeOutputBuffersNotOwnedByComponent();

                mCodec->changeState(mCodec->mOutputPortSettingsChangedState);
            } else if (data2 != OMX_IndexConfigCommonOutputCrop
                    && data2 != OMX_IndexConfigAndroidIntraRefresh) {
                ALOGV("[%s] OMX_EventPortSettingsChanged 0x%08x",
                        mCodec->mComponentName.c_str(), data2);
            }

            return true;
        }

        case OMX_EventBufferFlag:
        {
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::OutputPortSettingsChangedState::OutputPortSettingsChangedState(
        ACodec *codec)
    : BaseState(codec) {
}

// While reconfiguring the output port: output buffers returned by the
// component are freed (the port is being torn down), input buffers keep
// flowing as usual.
ACodec::BaseState::PortMode ACodec::OutputPortSettingsChangedState::getPortMode(
        OMX_U32 portIndex) {
    if (portIndex == kPortIndexOutput) {
        return FREE_BUFFERS;
    }

    CHECK_EQ(portIndex, (OMX_U32)kPortIndexInput);

    return RESUBMIT_BUFFERS;
}

// Flush/shutdown/resume/set-parameters requests cannot be serviced mid
// reconfiguration; defer them until we are back in ExecutingState.
bool ACodec::OutputPortSettingsChangedState::onMessageReceived(
        const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        case kWhatShutdown:
        case kWhatResume:
        case kWhatSetParameters:
        {
            if (msg->what() == kWhatResume) {
                ALOGV("[%s] Deferring resume", mCodec->mComponentName.c_str());
            }

            mCodec->deferMessage(msg);
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::OutputPortSettingsChangedState::stateEntered() {
    ALOGV("[%s] Now handling output port settings change",
            mCodec->mComponentName.c_str());
}

bool ACodec::OutputPortSettingsChangedState::onOMXFrameRendered(
        int64_t mediaTimeUs, nsecs_t systemNano) {
    mCodec->onFrameRendered(mediaTimeUs, systemNano);
    return true;
}

// Drives the two-phase output-port reconfiguration: on PortDisable
// completion the port is re-enabled and new buffers are allocated; on
// PortEnable completion we transition back to ExecutingState.
bool ACodec::OutputPortSettingsChangedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 == (OMX_U32)OMX_CommandPortDisable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete CommandPortDisable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now disabled.", mCodec->mComponentName.c_str());

                // All output buffers must have been returned/freed before the
                // disable completes; otherwise something is out of sync.
                status_t err = OK;
                if (!mCodec->mBuffers[kPortIndexOutput].isEmpty()) {
                    ALOGE("disabled port should be empty, but has %zu buffers",
                            mCodec->mBuffers[kPortIndexOutput].size());
                    err = FAILED_TRANSACTION;
                } else {
                    mCodec->mDealer[kPortIndexOutput].clear();
                }

                if (err == OK) {
                    err = mCodec->mOMX->sendCommand(
                            mCodec->mNode, OMX_CommandPortEnable, kPortIndexOutput);
                }

                if (err == OK) {
                    err = mCodec->allocateBuffersOnPort(kPortIndexOutput);
                    ALOGE_IF(err != OK, "Failed to allocate output port buffers after port "
                            "reconfiguration: (%d)", err);
                }

                if (err != OK) {
                    mCodec->signalError(OMX_ErrorUndefined, makeNoSideEffectStatus(err));

                    // This is technically not correct, but appears to be
                    // the only way to free the component instance.
                    // Controlled transitioning from excecuting->idle
                    // and idle->loaded seem impossible probably because
                    // the output port never finishes re-enabling.
                    mCodec->mShutdownInProgress = true;
                    mCodec->mKeepComponentAllocated = false;
                    mCodec->changeState(mCodec->mLoadedState);
                }

                return true;
            } else if (data1 == (OMX_U32)OMX_CommandPortEnable) {
                if (data2 != (OMX_U32)kPortIndexOutput) {
                    ALOGW("ignoring EventCmdComplete OMX_CommandPortEnable for port %u", data2);
                    return false;
                }

                ALOGV("[%s] Output port now reenabled.", mCodec->mComponentName.c_str());

                // Only resubmit output buffers if the executing state was
                // actively running (i.e. not paused) before reconfiguration.
                if (mCodec->mExecutingState->active()) {
                    mCodec->mExecutingState->submitOutputBuffers();
                }

                mCodec->changeState(mCodec->mExecutingState);

                return true;
            }

            return false;
        }

        default:
            return false;
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::ExecutingToIdleState::ExecutingToIdleState(ACodec *codec)
    : BaseState(codec),
      mComponentNowIdle(false) {
}

// Shutdown is in progress: flush requests are invalid here, and duplicate
// shutdown requests are no-ops.
bool ACodec::ExecutingToIdleState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGW("Ignoring flush request in ExecutingToIdleState");
            break;
        }

        case kWhatShutdown:
        {
            // We're already doing that...
            handled = true;
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::ExecutingToIdleState::stateEntered() {
    ALOGV("[%s] Now Executing->Idle", mCodec->mComponentName.c_str());

    mComponentNowIdle = false;
    mCodec->mLastOutputFormat.clear();
}

// Waits for the component to confirm the Executing->Idle transition; once
// idle AND all buffers are back with us, continue tearing down.
bool ACodec::ExecutingToIdleState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateIdle) {
                ALOGE("Unexpected command completion in ExecutingToIdleState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mComponentNowIdle = true;

            changeStateIfWeOwnAllBuffers();

            return true;
        }

        case OMX_EventPortSettingsChanged:
        case OMX_EventBufferFlag:
        {
            // We're shutting down and don't care about this anymore.
            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

// Once the component is idle and owns no buffers, request the Idle->Loaded
// transition and free our buffer allocations on both ports.
void ACodec::ExecutingToIdleState::changeStateIfWeOwnAllBuffers() {
    if (mComponentNowIdle && mCodec->allYourBuffersAreBelongToUs()) {
        status_t err = mCodec->mOMX->sendCommand(
                mCodec->mNode, OMX_CommandStateSet, OMX_StateLoaded);
        if (err == OK) {
            err = mCodec->freeBuffersOnPort(kPortIndexInput);
            status_t err2 = mCodec->freeBuffersOnPort(kPortIndexOutput);
            // Report the first failure, but attempt to free both ports.
            if (err == OK) {
                err = err2;
            }
        }

        if ((mCodec->mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown)
                && mCodec->mNativeWindow != NULL) {
            // We push enough 1x1 blank buffers to ensure that one of
            // them has made it to the display.  This allows the OMX
            // component teardown to zero out any protected buffers
            // without the risk of scanning out one of those buffers.
            pushBlankBuffersToNativeWindow(mCodec->mNativeWindow.get());
        }

        if (err != OK) {
            mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
            return;
        }

        mCodec->changeState(mCodec->mIdleToLoadedState);
    }
}

// Each buffer returned during teardown may be the last one outstanding, so
// re-check whether the state transition can proceed.
void ACodec::ExecutingToIdleState::onInputBufferFilled(
        const sp<AMessage> &msg) {
    BaseState::onInputBufferFilled(msg);

    changeStateIfWeOwnAllBuffers();
}

void ACodec::ExecutingToIdleState::onOutputBufferDrained(
        const sp<AMessage> &msg) {
    BaseState::onOutputBufferDrained(msg);

    changeStateIfWeOwnAllBuffers();
}

////////////////////////////////////////////////////////////////////////////////

ACodec::IdleToLoadedState::IdleToLoadedState(ACodec *codec)
    : BaseState(codec) {
}

// Final leg of shutdown: duplicate shutdowns are no-ops, flushes are errors.
bool ACodec::IdleToLoadedState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            // We're already doing that...

            handled = true;
            break;
        }

        case kWhatFlush:
        {
            // Don't send me a flush request if you previously wanted me
            // to shutdown.
            ALOGE("Got flush request in IdleToLoadedState");
            break;
        }

        default:
            handled = BaseState::onMessageReceived(msg);
            break;
    }

    return handled;
}

void ACodec::IdleToLoadedState::stateEntered() {
    ALOGV("[%s] Now Idle->Loaded", mCodec->mComponentName.c_str());
}

// Waits for the Idle->Loaded confirmation, then enters LoadedState.
bool ACodec::IdleToLoadedState::onOMXEvent(
        OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
    switch (event) {
        case OMX_EventCmdComplete:
        {
            if (data1 != (OMX_U32)OMX_CommandStateSet
                    || data2 != (OMX_U32)OMX_StateLoaded) {
                ALOGE("Unexpected command completion in IdleToLoadedState: %s(%u) %s(%u)",
                        asString((OMX_COMMANDTYPE)data1), data1,
                        asString((OMX_STATETYPE)data2), data2);
                mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION);
                return true;
            }

            mCodec->changeState(mCodec->mLoadedState);

            return true;
        }

        default:
            return BaseState::onOMXEvent(event, data1, data2);
    }
}

////////////////////////////////////////////////////////////////////////////////

ACodec::FlushingState::FlushingState(ACodec *codec)
    : BaseState(codec) {
}

void ACodec::FlushingState::stateEntered() {
    ALOGV("[%s] Now Flushing", mCodec->mComponentName.c_str());

    // Both ports must report flush-complete before we leave this state.
    mFlushComplete[kPortIndexInput] = mFlushComplete[kPortIndexOutput] = false;
}

// Shutdown requests are deferred until the flush finishes; a second flush
// while flushing is a no-op.
bool ACodec::FlushingState::onMessageReceived(const sp<AMessage> &msg) {
    bool handled = false;

    switch (msg->what()) {
        case kWhatShutdown:
        {
            mCodec->deferMessage(msg);
            break;
        }

        case kWhatFlush:
        {
            // We're already doing this right now.
7576 handled = true; 7577 break; 7578 } 7579 7580 default: 7581 handled = BaseState::onMessageReceived(msg); 7582 break; 7583 } 7584 7585 return handled; 7586} 7587 7588bool ACodec::FlushingState::onOMXEvent( 7589 OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) { 7590 ALOGV("[%s] FlushingState onOMXEvent(%u,%d)", 7591 mCodec->mComponentName.c_str(), event, (OMX_S32)data1); 7592 7593 switch (event) { 7594 case OMX_EventCmdComplete: 7595 { 7596 if (data1 != (OMX_U32)OMX_CommandFlush) { 7597 ALOGE("unexpected EventCmdComplete %s(%d) data2:%d in FlushingState", 7598 asString((OMX_COMMANDTYPE)data1), data1, data2); 7599 mCodec->signalError(OMX_ErrorUndefined, FAILED_TRANSACTION); 7600 return true; 7601 } 7602 7603 if (data2 == kPortIndexInput || data2 == kPortIndexOutput) { 7604 if (mFlushComplete[data2]) { 7605 ALOGW("Flush already completed for %s port", 7606 data2 == kPortIndexInput ? "input" : "output"); 7607 return true; 7608 } 7609 mFlushComplete[data2] = true; 7610 7611 if (mFlushComplete[kPortIndexInput] && mFlushComplete[kPortIndexOutput]) { 7612 changeStateIfWeOwnAllBuffers(); 7613 } 7614 } else if (data2 == OMX_ALL) { 7615 if (!mFlushComplete[kPortIndexInput] || !mFlushComplete[kPortIndexOutput]) { 7616 ALOGW("received flush complete event for OMX_ALL before ports have been" 7617 "flushed (%d/%d)", 7618 mFlushComplete[kPortIndexInput], mFlushComplete[kPortIndexOutput]); 7619 return false; 7620 } 7621 7622 changeStateIfWeOwnAllBuffers(); 7623 } else { 7624 ALOGW("data2 not OMX_ALL but %u in EventCmdComplete CommandFlush", data2); 7625 } 7626 7627 return true; 7628 } 7629 7630 case OMX_EventPortSettingsChanged: 7631 { 7632 sp<AMessage> msg = new AMessage(kWhatOMXMessage, mCodec); 7633 msg->setInt32("type", omx_message::EVENT); 7634 msg->setInt32("node", mCodec->mNode); 7635 msg->setInt32("event", event); 7636 msg->setInt32("data1", data1); 7637 msg->setInt32("data2", data2); 7638 7639 ALOGV("[%s] Deferring OMX_EventPortSettingsChanged", 7640 
mCodec->mComponentName.c_str()); 7641 7642 mCodec->deferMessage(msg); 7643 7644 return true; 7645 } 7646 7647 default: 7648 return BaseState::onOMXEvent(event, data1, data2); 7649 } 7650 7651 return true; 7652} 7653 7654void ACodec::FlushingState::onOutputBufferDrained(const sp<AMessage> &msg) { 7655 BaseState::onOutputBufferDrained(msg); 7656 7657 changeStateIfWeOwnAllBuffers(); 7658} 7659 7660void ACodec::FlushingState::onInputBufferFilled(const sp<AMessage> &msg) { 7661 BaseState::onInputBufferFilled(msg); 7662 7663 changeStateIfWeOwnAllBuffers(); 7664} 7665 7666void ACodec::FlushingState::changeStateIfWeOwnAllBuffers() { 7667 if (mFlushComplete[kPortIndexInput] 7668 && mFlushComplete[kPortIndexOutput] 7669 && mCodec->allYourBuffersAreBelongToUs()) { 7670 // We now own all buffers except possibly those still queued with 7671 // the native window for rendering. Let's get those back as well. 7672 mCodec->waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs(); 7673 7674 mCodec->mRenderTracker.clear(systemTime(CLOCK_MONOTONIC)); 7675 7676 sp<AMessage> notify = mCodec->mNotify->dup(); 7677 notify->setInt32("what", CodecBase::kWhatFlushCompleted); 7678 notify->post(); 7679 7680 mCodec->mPortEOS[kPortIndexInput] = 7681 mCodec->mPortEOS[kPortIndexOutput] = false; 7682 7683 mCodec->mInputEOSResult = OK; 7684 7685 if (mCodec->mSkipCutBuffer != NULL) { 7686 mCodec->mSkipCutBuffer->clear(); 7687 } 7688 7689 mCodec->changeState(mCodec->mExecutingState); 7690 } 7691} 7692 7693status_t ACodec::queryCapabilities( 7694 const AString &name, const AString &mime, bool isEncoder, 7695 sp<MediaCodecInfo::Capabilities> *caps) { 7696 (*caps).clear(); 7697 const char *role = getComponentRole(isEncoder, mime.c_str()); 7698 if (role == NULL) { 7699 return BAD_VALUE; 7700 } 7701 7702 OMXClient client; 7703 status_t err = client.connect(); 7704 if (err != OK) { 7705 return err; 7706 } 7707 7708 sp<IOMX> omx = client.interface(); 7709 sp<CodecObserver> observer = new CodecObserver; 7710 
IOMX::node_id node = 0; 7711 7712 err = omx->allocateNode(name.c_str(), observer, NULL, &node); 7713 if (err != OK) { 7714 client.disconnect(); 7715 return err; 7716 } 7717 7718 err = setComponentRole(omx, node, role); 7719 if (err != OK) { 7720 omx->freeNode(node); 7721 client.disconnect(); 7722 return err; 7723 } 7724 7725 sp<MediaCodecInfo::CapabilitiesBuilder> builder = new MediaCodecInfo::CapabilitiesBuilder(); 7726 bool isVideo = mime.startsWithIgnoreCase("video/"); 7727 7728 if (isVideo) { 7729 OMX_VIDEO_PARAM_PROFILELEVELTYPE param; 7730 InitOMXParams(¶m); 7731 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7732 7733 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7734 param.nProfileIndex = index; 7735 status_t err = omx->getParameter( 7736 node, OMX_IndexParamVideoProfileLevelQuerySupported, 7737 ¶m, sizeof(param)); 7738 if (err != OK) { 7739 break; 7740 } 7741 builder->addProfileLevel(param.eProfile, param.eLevel); 7742 7743 if (index == kMaxIndicesToCheck) { 7744 ALOGW("[%s] stopping checking profiles after %u: %x/%x", 7745 name.c_str(), index, 7746 param.eProfile, param.eLevel); 7747 } 7748 } 7749 7750 // Color format query 7751 // return colors in the order reported by the OMX component 7752 // prefix "flexible" standard ones with the flexible equivalent 7753 OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat; 7754 InitOMXParams(&portFormat); 7755 portFormat.nPortIndex = isEncoder ? 
kPortIndexInput : kPortIndexOutput; 7756 Vector<uint32_t> supportedColors; // shadow copy to check for duplicates 7757 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7758 portFormat.nIndex = index; 7759 status_t err = omx->getParameter( 7760 node, OMX_IndexParamVideoPortFormat, 7761 &portFormat, sizeof(portFormat)); 7762 if (err != OK) { 7763 break; 7764 } 7765 7766 OMX_U32 flexibleEquivalent; 7767 if (isFlexibleColorFormat( 7768 omx, node, portFormat.eColorFormat, false /* usingNativeWindow */, 7769 &flexibleEquivalent)) { 7770 bool marked = false; 7771 for (size_t i = 0; i < supportedColors.size(); ++i) { 7772 if (supportedColors[i] == flexibleEquivalent) { 7773 marked = true; 7774 break; 7775 } 7776 } 7777 if (!marked) { 7778 supportedColors.push(flexibleEquivalent); 7779 builder->addColorFormat(flexibleEquivalent); 7780 } 7781 } 7782 supportedColors.push(portFormat.eColorFormat); 7783 builder->addColorFormat(portFormat.eColorFormat); 7784 7785 if (index == kMaxIndicesToCheck) { 7786 ALOGW("[%s] stopping checking formats after %u: %s(%x)", 7787 name.c_str(), index, 7788 asString(portFormat.eColorFormat), portFormat.eColorFormat); 7789 } 7790 } 7791 } else if (mime.equalsIgnoreCase(MEDIA_MIMETYPE_AUDIO_AAC)) { 7792 // More audio codecs if they have profiles. 7793 OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param; 7794 InitOMXParams(¶m); 7795 param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput; 7796 for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) { 7797 param.nProfileIndex = index; 7798 status_t err = omx->getParameter( 7799 node, (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported, 7800 ¶m, sizeof(param)); 7801 if (err != OK) { 7802 break; 7803 } 7804 // For audio, level is ignored. 
7805 builder->addProfileLevel(param.eProfile, 0 /* level */); 7806 7807 if (index == kMaxIndicesToCheck) { 7808 ALOGW("[%s] stopping checking profiles after %u: %x", 7809 name.c_str(), index, 7810 param.eProfile); 7811 } 7812 } 7813 7814 // NOTE: Without Android extensions, OMX does not provide a way to query 7815 // AAC profile support 7816 if (param.nProfileIndex == 0) { 7817 ALOGW("component %s doesn't support profile query.", name.c_str()); 7818 } 7819 } 7820 7821 if (isVideo && !isEncoder) { 7822 native_handle_t *sidebandHandle = NULL; 7823 if (omx->configureVideoTunnelMode( 7824 node, kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) { 7825 // tunneled playback includes adaptive playback 7826 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback 7827 | MediaCodecInfo::Capabilities::kFlagSupportsTunneledPlayback); 7828 } else if (omx->storeMetaDataInBuffers( 7829 node, kPortIndexOutput, OMX_TRUE) == OK || 7830 omx->prepareForAdaptivePlayback( 7831 node, kPortIndexOutput, OMX_TRUE, 7832 1280 /* width */, 720 /* height */) == OK) { 7833 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsAdaptivePlayback); 7834 } 7835 } 7836 7837 if (isVideo && isEncoder) { 7838 OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params; 7839 InitOMXParams(¶ms); 7840 params.nPortIndex = kPortIndexOutput; 7841 // TODO: should we verify if fallback is supported? 7842 if (omx->getConfig( 7843 node, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, 7844 ¶ms, sizeof(params)) == OK) { 7845 builder->addFlags(MediaCodecInfo::Capabilities::kFlagSupportsIntraRefresh); 7846 } 7847 } 7848 7849 *caps = builder; 7850 omx->freeNode(node); 7851 client.disconnect(); 7852 return OK; 7853} 7854 7855// These are supposed be equivalent to the logic in 7856// "audio_channel_out_mask_from_count". 
7857//static 7858status_t ACodec::getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) { 7859 switch (numChannels) { 7860 case 1: 7861 map[0] = OMX_AUDIO_ChannelCF; 7862 break; 7863 case 2: 7864 map[0] = OMX_AUDIO_ChannelLF; 7865 map[1] = OMX_AUDIO_ChannelRF; 7866 break; 7867 case 3: 7868 map[0] = OMX_AUDIO_ChannelLF; 7869 map[1] = OMX_AUDIO_ChannelRF; 7870 map[2] = OMX_AUDIO_ChannelCF; 7871 break; 7872 case 4: 7873 map[0] = OMX_AUDIO_ChannelLF; 7874 map[1] = OMX_AUDIO_ChannelRF; 7875 map[2] = OMX_AUDIO_ChannelLR; 7876 map[3] = OMX_AUDIO_ChannelRR; 7877 break; 7878 case 5: 7879 map[0] = OMX_AUDIO_ChannelLF; 7880 map[1] = OMX_AUDIO_ChannelRF; 7881 map[2] = OMX_AUDIO_ChannelCF; 7882 map[3] = OMX_AUDIO_ChannelLR; 7883 map[4] = OMX_AUDIO_ChannelRR; 7884 break; 7885 case 6: 7886 map[0] = OMX_AUDIO_ChannelLF; 7887 map[1] = OMX_AUDIO_ChannelRF; 7888 map[2] = OMX_AUDIO_ChannelCF; 7889 map[3] = OMX_AUDIO_ChannelLFE; 7890 map[4] = OMX_AUDIO_ChannelLR; 7891 map[5] = OMX_AUDIO_ChannelRR; 7892 break; 7893 case 7: 7894 map[0] = OMX_AUDIO_ChannelLF; 7895 map[1] = OMX_AUDIO_ChannelRF; 7896 map[2] = OMX_AUDIO_ChannelCF; 7897 map[3] = OMX_AUDIO_ChannelLFE; 7898 map[4] = OMX_AUDIO_ChannelLR; 7899 map[5] = OMX_AUDIO_ChannelRR; 7900 map[6] = OMX_AUDIO_ChannelCS; 7901 break; 7902 case 8: 7903 map[0] = OMX_AUDIO_ChannelLF; 7904 map[1] = OMX_AUDIO_ChannelRF; 7905 map[2] = OMX_AUDIO_ChannelCF; 7906 map[3] = OMX_AUDIO_ChannelLFE; 7907 map[4] = OMX_AUDIO_ChannelLR; 7908 map[5] = OMX_AUDIO_ChannelRR; 7909 map[6] = OMX_AUDIO_ChannelLS; 7910 map[7] = OMX_AUDIO_ChannelRS; 7911 break; 7912 default: 7913 return -EINVAL; 7914 } 7915 7916 return OK; 7917} 7918 7919} // namespace android 7920