PlaybackSession.cpp revision 7bc2ffca12828d72aaeeace0891183dc547877c0
1/* 2 * Copyright 2012, The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "PlaybackSession" 19#include <utils/Log.h> 20 21#include "PlaybackSession.h" 22 23#include "Converter.h" 24#include "MediaPuller.h" 25#include "RepeaterSource.h" 26#include "Sender.h" 27#include "TSPacketizer.h" 28#include "include/avc_utils.h" 29#include "WifiDisplaySource.h" 30 31#include <binder/IServiceManager.h> 32#include <gui/ISurfaceComposer.h> 33#include <gui/SurfaceComposerClient.h> 34#include <media/IHDCP.h> 35#include <media/stagefright/foundation/ABitReader.h> 36#include <media/stagefright/foundation/ABuffer.h> 37#include <media/stagefright/foundation/ADebug.h> 38#include <media/stagefright/foundation/AMessage.h> 39#include <media/stagefright/foundation/hexdump.h> 40#include <media/stagefright/AudioSource.h> 41#include <media/stagefright/DataSource.h> 42#include <media/stagefright/MediaDefs.h> 43#include <media/stagefright/MediaErrors.h> 44#include <media/stagefright/MediaExtractor.h> 45#include <media/stagefright/MediaSource.h> 46#include <media/stagefright/MetaData.h> 47#include <media/stagefright/MPEG2TSWriter.h> 48#include <media/stagefright/SurfaceMediaSource.h> 49#include <media/stagefright/Utils.h> 50 51#include <OMX_IVCommon.h> 52 53namespace android { 54 55struct WifiDisplaySource::PlaybackSession::Track : public AHandler { 56 enum { 57 kWhatStopped, 58 }; 59 60 Track(const sp<AMessage> ¬ify, 61 const sp<ALooper> &pullLooper, 62 const sp<ALooper> &codecLooper, 63 const sp<MediaPuller> &mediaPuller, 64 const sp<Converter> &converter); 65 66 void setRepeaterSource(const sp<RepeaterSource> &source); 67 68 sp<AMessage> getFormat(); 69 bool isAudio() const; 70 71 const sp<Converter> &converter() const; 72 ssize_t packetizerTrackIndex() const; 73 74 void setPacketizerTrackIndex(size_t index); 75 76 status_t start(); 77 void stopAsync(); 78 79 void pause(); 80 void resume(); 81 82 void queueAccessUnit(const sp<ABuffer> &accessUnit); 83 sp<ABuffer> dequeueAccessUnit(); 84 85 bool hasOutputBuffer(int64_t *timeUs) const; 86 void queueOutputBuffer(const sp<ABuffer> &accessUnit); 87 sp<ABuffer> dequeueOutputBuffer(); 88 89#if SUSPEND_VIDEO_IF_IDLE 90 bool isSuspended() const; 91#endif 92 93 size_t countQueuedOutputBuffers() const { 94 return mQueuedOutputBuffers.size(); 95 } 96 97 void requestIDRFrame(); 98 99protected: 100 virtual void onMessageReceived(const sp<AMessage> &msg); 101 virtual ~Track(); 102 103private: 104 enum { 105 kWhatMediaPullerStopped, 106 }; 107 108 sp<AMessage> mNotify; 109 sp<ALooper> mPullLooper; 110 sp<ALooper> mCodecLooper; 111 sp<MediaPuller> mMediaPuller; 112 sp<Converter> mConverter; 113 bool mStarted; 114 ssize_t mPacketizerTrackIndex; 115 bool mIsAudio; 116 List<sp<ABuffer> > mQueuedAccessUnits; 117 sp<RepeaterSource> mRepeaterSource; 118 List<sp<ABuffer> > mQueuedOutputBuffers; 119 int64_t mLastOutputBufferQueuedTimeUs; 120 121 static bool IsAudioFormat(const sp<AMessage> &format); 122 123 DISALLOW_EVIL_CONSTRUCTORS(Track); 124}; 125 126WifiDisplaySource::PlaybackSession::Track::Track( 127 const sp<AMessage> ¬ify, 128 const sp<ALooper> &pullLooper, 129 const sp<ALooper> &codecLooper, 130 const sp<MediaPuller> &mediaPuller, 131 const sp<Converter> &converter) 132 : mNotify(notify), 133 mPullLooper(pullLooper), 134 mCodecLooper(codecLooper), 135 mMediaPuller(mediaPuller), 136 mConverter(converter), 137 mStarted(false), 138 mPacketizerTrackIndex(-1), 139 mIsAudio(IsAudioFormat(mConverter->getOutputFormat())), 140 mLastOutputBufferQueuedTimeUs(-1ll) { 141} 142 143WifiDisplaySource::PlaybackSession::Track::~Track() { 144 CHECK(!mStarted); 145} 146 147// static 148bool WifiDisplaySource::PlaybackSession::Track::IsAudioFormat( 149 const sp<AMessage> &format) { 150 AString mime; 151 CHECK(format->findString("mime", &mime)); 152 153 return !strncasecmp(mime.c_str(), "audio/", 6); 154} 155 156sp<AMessage> WifiDisplaySource::PlaybackSession::Track::getFormat() { 157 return mConverter->getOutputFormat(); 158} 159 160bool WifiDisplaySource::PlaybackSession::Track::isAudio() const { 161 return mIsAudio; 162} 163 164const sp<Converter> &WifiDisplaySource::PlaybackSession::Track::converter() const { 165 return mConverter; 166} 167 168ssize_t WifiDisplaySource::PlaybackSession::Track::packetizerTrackIndex() const { 169 return mPacketizerTrackIndex; 170} 171 172void WifiDisplaySource::PlaybackSession::Track::setPacketizerTrackIndex(size_t index) { 173 CHECK_LT(mPacketizerTrackIndex, 0); 174 mPacketizerTrackIndex = index; 175} 176 177status_t WifiDisplaySource::PlaybackSession::Track::start() { 178 ALOGV("Track::start isAudio=%d", mIsAudio); 179 180 CHECK(!mStarted); 181 182 status_t err = OK; 183 184 if (mMediaPuller != NULL) { 185 err = mMediaPuller->start(); 186 } 187 188 if (err == OK) { 189 mStarted = true; 190 } 191 192 return err; 193} 194 195void WifiDisplaySource::PlaybackSession::Track::stopAsync() { 196 ALOGV("Track::stopAsync isAudio=%d", mIsAudio); 197 198 mConverter->shutdownAsync(); 199 200 sp<AMessage> msg = new AMessage(kWhatMediaPullerStopped, id()); 201 202 if (mStarted && mMediaPuller != NULL) { 203 if (mRepeaterSource != NULL) { 204 // Let's unblock MediaPuller's MediaSource::read(). 205 mRepeaterSource->wakeUp(); 206 } 207 208 mMediaPuller->stopAsync(msg); 209 } else { 210 msg->post(); 211 } 212} 213 214void WifiDisplaySource::PlaybackSession::Track::pause() { 215 mMediaPuller->pause(); 216} 217 218void WifiDisplaySource::PlaybackSession::Track::resume() { 219 mMediaPuller->resume(); 220} 221 222void WifiDisplaySource::PlaybackSession::Track::onMessageReceived( 223 const sp<AMessage> &msg) { 224 switch (msg->what()) { 225 case kWhatMediaPullerStopped: 226 { 227 mConverter.clear(); 228 229 mStarted = false; 230 231 sp<AMessage> notify = mNotify->dup(); 232 notify->setInt32("what", kWhatStopped); 233 notify->post(); 234 235 ALOGI("kWhatStopped %s posted", mIsAudio ? "audio" : "video"); 236 break; 237 } 238 239 default: 240 TRESPASS(); 241 } 242} 243 244void WifiDisplaySource::PlaybackSession::Track::queueAccessUnit( 245 const sp<ABuffer> &accessUnit) { 246 mQueuedAccessUnits.push_back(accessUnit); 247} 248 249sp<ABuffer> WifiDisplaySource::PlaybackSession::Track::dequeueAccessUnit() { 250 if (mQueuedAccessUnits.empty()) { 251 return NULL; 252 } 253 254 sp<ABuffer> accessUnit = *mQueuedAccessUnits.begin(); 255 CHECK(accessUnit != NULL); 256 257 mQueuedAccessUnits.erase(mQueuedAccessUnits.begin()); 258 259 return accessUnit; 260} 261 262void WifiDisplaySource::PlaybackSession::Track::setRepeaterSource( 263 const sp<RepeaterSource> &source) { 264 mRepeaterSource = source; 265} 266 267void WifiDisplaySource::PlaybackSession::Track::requestIDRFrame() { 268 if (mIsAudio) { 269 return; 270 } 271 272 if (mRepeaterSource != NULL) { 273 mRepeaterSource->wakeUp(); 274 } 275 276 mConverter->requestIDRFrame(); 277} 278 279bool WifiDisplaySource::PlaybackSession::Track::hasOutputBuffer( 280 int64_t *timeUs) const { 281 *timeUs = 0ll; 282 283 if (mQueuedOutputBuffers.empty()) { 284 return false; 285 } 286 287 const sp<ABuffer> &outputBuffer = *mQueuedOutputBuffers.begin(); 288 289 CHECK(outputBuffer->meta()->findInt64("timeUs", timeUs)); 290 291 return true; 292} 293 294void WifiDisplaySource::PlaybackSession::Track::queueOutputBuffer( 295 const sp<ABuffer> &accessUnit) { 296 mQueuedOutputBuffers.push_back(accessUnit); 297 mLastOutputBufferQueuedTimeUs = ALooper::GetNowUs(); 298} 299 300sp<ABuffer> WifiDisplaySource::PlaybackSession::Track::dequeueOutputBuffer() { 301 CHECK(!mQueuedOutputBuffers.empty()); 302 303 sp<ABuffer> outputBuffer = *mQueuedOutputBuffers.begin(); 304 mQueuedOutputBuffers.erase(mQueuedOutputBuffers.begin()); 305 306 return outputBuffer; 307} 308 309#if SUSPEND_VIDEO_IF_IDLE 310bool WifiDisplaySource::PlaybackSession::Track::isSuspended() const { 311 if (!mQueuedOutputBuffers.empty()) { 312 return false; 313 } 314 315 if (mLastOutputBufferQueuedTimeUs < 0ll) { 316 // We've never seen an output buffer queued, but tracks start 317 // out live, not suspended. 318 return false; 319 } 320 321 // If we've not seen new output data for 60ms or more, we consider 322 // this track suspended for the time being. 323 return (ALooper::GetNowUs() - mLastOutputBufferQueuedTimeUs) > 60000ll; 324} 325#endif 326 327//////////////////////////////////////////////////////////////////////////////// 328 329WifiDisplaySource::PlaybackSession::PlaybackSession( 330 const sp<ANetworkSession> &netSession, 331 const sp<AMessage> ¬ify, 332 const in_addr &interfaceAddr, 333 const sp<IHDCP> &hdcp) 334 : mNetSession(netSession), 335 mNotify(notify), 336 mInterfaceAddr(interfaceAddr), 337 mHDCP(hdcp), 338 mWeAreDead(false), 339 mPaused(false), 340 mLastLifesignUs(), 341 mVideoTrackIndex(-1), 342 mPrevTimeUs(-1ll), 343 mAllTracksHavePacketizerIndex(false) { 344} 345 346status_t WifiDisplaySource::PlaybackSession::init( 347 const char *clientIP, int32_t clientRtp, int32_t clientRtcp, 348 Sender::TransportMode transportMode, 349 bool enableAudio, 350 bool usePCMAudio, 351 bool enableVideo, 352 VideoFormats::ResolutionType videoResolutionType, 353 size_t videoResolutionIndex) { 354 status_t err = setupPacketizer( 355 enableAudio, 356 usePCMAudio, 357 enableVideo, 358 videoResolutionType, 359 videoResolutionIndex); 360 361 if (err != OK) { 362 return err; 363 } 364 365 sp<AMessage> notify = new AMessage(kWhatSenderNotify, id()); 366 mSender = new Sender(mNetSession, notify); 367 368 mSenderLooper = new ALooper; 369 mSenderLooper->setName("sender_looper"); 370 371 mSenderLooper->start( 372 false /* runOnCallingThread */, 373 false /* canCallJava */, 374 PRIORITY_AUDIO); 375 376 mSenderLooper->registerHandler(mSender); 377 378 err = mSender->init(clientIP, clientRtp, clientRtcp, transportMode); 379 380 if (err != OK) { 381 return err; 382 } 383 384 updateLiveness(); 385 386 return OK; 387} 388 389WifiDisplaySource::PlaybackSession::~PlaybackSession() { 390} 391 392int32_t WifiDisplaySource::PlaybackSession::getRTPPort() const { 393 return mSender->getRTPPort(); 394} 395 396int64_t WifiDisplaySource::PlaybackSession::getLastLifesignUs() const { 397 return mLastLifesignUs; 398} 399 400void WifiDisplaySource::PlaybackSession::updateLiveness() { 401 mLastLifesignUs = ALooper::GetNowUs(); 402} 403 404status_t WifiDisplaySource::PlaybackSession::play() { 405 updateLiveness(); 406 407 (new AMessage(kWhatResume, id()))->post(); 408 409 return OK; 410} 411 412status_t WifiDisplaySource::PlaybackSession::finishPlay() { 413 // XXX Give the dongle a second to bind its sockets. 414 (new AMessage(kWhatFinishPlay, id()))->post(1000000ll); 415 return OK; 416} 417 418status_t WifiDisplaySource::PlaybackSession::onFinishPlay() { 419 return mSender->finishInit(); 420} 421 422status_t WifiDisplaySource::PlaybackSession::onFinishPlay2() { 423 mSender->scheduleSendSR(); 424 425 for (size_t i = 0; i < mTracks.size(); ++i) { 426 CHECK_EQ((status_t)OK, mTracks.editValueAt(i)->start()); 427 } 428 429 sp<AMessage> notify = mNotify->dup(); 430 notify->setInt32("what", kWhatSessionEstablished); 431 notify->post(); 432 433 return OK; 434} 435 436status_t WifiDisplaySource::PlaybackSession::pause() { 437 updateLiveness(); 438 439 (new AMessage(kWhatPause, id()))->post(); 440 441 return OK; 442} 443 444void WifiDisplaySource::PlaybackSession::destroyAsync() { 445 ALOGI("destroyAsync"); 446 447 for (size_t i = 0; i < mTracks.size(); ++i) { 448 mTracks.valueAt(i)->stopAsync(); 449 } 450} 451 452void WifiDisplaySource::PlaybackSession::onMessageReceived( 453 const sp<AMessage> &msg) { 454 switch (msg->what()) { 455 case kWhatConverterNotify: 456 { 457 if (mWeAreDead) { 458 ALOGV("dropping msg '%s' because we're dead", 459 msg->debugString().c_str()); 460 461 break; 462 } 463 464 int32_t what; 465 CHECK(msg->findInt32("what", &what)); 466 467 size_t trackIndex; 468 CHECK(msg->findSize("trackIndex", &trackIndex)); 469 470 if (what == Converter::kWhatAccessUnit) { 471 const sp<Track> &track = mTracks.valueFor(trackIndex); 472 473 ssize_t packetizerTrackIndex = track->packetizerTrackIndex(); 474 475 if (packetizerTrackIndex < 0) { 476 sp<AMessage> trackFormat = track->getFormat()->dup(); 477 if (mHDCP != NULL && !track->isAudio()) { 478 // HDCP2.0 _and_ HDCP 2.1 specs say to set the version 479 // inside the HDCP descriptor to 0x20!!! 480 trackFormat->setInt32("hdcp-version", 0x20); 481 } 482 packetizerTrackIndex = mPacketizer->addTrack(trackFormat); 483 484 CHECK_GE(packetizerTrackIndex, 0); 485 486 track->setPacketizerTrackIndex(packetizerTrackIndex); 487 488 if (allTracksHavePacketizerIndex()) { 489 status_t err = packetizeQueuedAccessUnits(); 490 491 if (err != OK) { 492 notifySessionDead(); 493 break; 494 } 495 } 496 } 497 498 sp<ABuffer> accessUnit; 499 CHECK(msg->findBuffer("accessUnit", &accessUnit)); 500 501 if (!allTracksHavePacketizerIndex()) { 502 track->queueAccessUnit(accessUnit); 503 break; 504 } 505 506 track->queueOutputBuffer(accessUnit); 507 508 drainAccessUnits(); 509 break; 510 } else if (what == Converter::kWhatEOS) { 511 CHECK_EQ(what, Converter::kWhatEOS); 512 513 ALOGI("output EOS on track %d", trackIndex); 514 515 ssize_t index = mTracks.indexOfKey(trackIndex); 516 CHECK_GE(index, 0); 517 518 const sp<Converter> &converter = 519 mTracks.valueAt(index)->converter(); 520 looper()->unregisterHandler(converter->id()); 521 522 mTracks.removeItemsAt(index); 523 524 if (mTracks.isEmpty()) { 525 ALOGI("Reached EOS"); 526 } 527 } else { 528 CHECK_EQ(what, Converter::kWhatError); 529 530 status_t err; 531 CHECK(msg->findInt32("err", &err)); 532 533 ALOGE("converter signaled error %d", err); 534 535 notifySessionDead(); 536 } 537 break; 538 } 539 540 case kWhatSenderNotify: 541 { 542 int32_t what; 543 CHECK(msg->findInt32("what", &what)); 544 545 if (what == Sender::kWhatInitDone) { 546 onFinishPlay2(); 547 } else if (what == Sender::kWhatSessionDead) { 548 notifySessionDead(); 549 } else if (what == Sender::kWhatBinaryData) { 550 sp<AMessage> notify = mNotify->dup(); 551 notify->setInt32("what", kWhatBinaryData); 552 553 int32_t channel; 554 CHECK(msg->findInt32("channel", &channel)); 555 notify->setInt32("channel", channel); 556 557 sp<ABuffer> data; 558 CHECK(msg->findBuffer("data", &data)); 559 notify->setBuffer("data", data); 560 notify->post(); 561 } else { 562 TRESPASS(); 563 } 564 565 break; 566 } 567 568 case kWhatFinishPlay: 569 { 570 onFinishPlay(); 571 break; 572 } 573 574 case kWhatTrackNotify: 575 { 576 int32_t what; 577 CHECK(msg->findInt32("what", &what)); 578 579 size_t trackIndex; 580 CHECK(msg->findSize("trackIndex", &trackIndex)); 581 582 if (what == Track::kWhatStopped) { 583 ALOGI("Track %d stopped", trackIndex); 584 585 sp<Track> track = mTracks.valueFor(trackIndex); 586 looper()->unregisterHandler(track->id()); 587 mTracks.removeItem(trackIndex); 588 track.clear(); 589 590 if (!mTracks.isEmpty()) { 591 ALOGI("not all tracks are stopped yet"); 592 break; 593 } 594 595 mSenderLooper->unregisterHandler(mSender->id()); 596 mSender.clear(); 597 mSenderLooper.clear(); 598 599 mPacketizer.clear(); 600 601 sp<AMessage> notify = mNotify->dup(); 602 notify->setInt32("what", kWhatSessionDestroyed); 603 notify->post(); 604 } 605 break; 606 } 607 608 case kWhatPacketize: 609 { 610 size_t trackIndex; 611 CHECK(msg->findSize("trackIndex", &trackIndex)); 612 613 sp<ABuffer> accessUnit; 614 CHECK(msg->findBuffer("accessUnit", &accessUnit)); 615 616#if 0 617 if ((ssize_t)trackIndex == mVideoTrackIndex) { 618 int64_t nowUs = ALooper::GetNowUs(); 619 static int64_t prevNowUs = 0ll; 620 621 ALOGI("sending AU, dNowUs=%lld us", nowUs - prevNowUs); 622 623 prevNowUs = nowUs; 624 } 625#endif 626 627 break; 628 } 629 630 case kWhatPause: 631 { 632 if (mPaused) { 633 break; 634 } 635 636 for (size_t i = 0; i < mTracks.size(); ++i) { 637 mTracks.editValueAt(i)->pause(); 638 } 639 640 mPaused = true; 641 break; 642 } 643 644 case kWhatResume: 645 { 646 if (!mPaused) { 647 break; 648 } 649 650 for (size_t i = 0; i < mTracks.size(); ++i) { 651 mTracks.editValueAt(i)->resume(); 652 } 653 654 mPaused = false; 655 break; 656 } 657 658 default: 659 TRESPASS(); 660 } 661} 662 663status_t WifiDisplaySource::PlaybackSession::setupPacketizer( 664 bool enableAudio, 665 bool usePCMAudio, 666 bool enableVideo, 667 VideoFormats::ResolutionType videoResolutionType, 668 size_t videoResolutionIndex) { 669 CHECK(enableAudio || enableVideo); 670 671 mPacketizer = new TSPacketizer; 672 673 if (enableVideo) { 674 status_t err = addVideoSource( 675 videoResolutionType, videoResolutionIndex); 676 677 if (err != OK) { 678 return err; 679 } 680 } 681 682 if (!enableAudio) { 683 return OK; 684 } 685 686 return addAudioSource(usePCMAudio); 687} 688 689status_t WifiDisplaySource::PlaybackSession::addSource( 690 bool isVideo, const sp<MediaSource> &source, bool isRepeaterSource, 691 bool usePCMAudio, size_t *numInputBuffers) { 692 CHECK(!usePCMAudio || !isVideo); 693 CHECK(!isRepeaterSource || isVideo); 694 695 sp<ALooper> pullLooper = new ALooper; 696 pullLooper->setName("pull_looper"); 697 698 pullLooper->start( 699 false /* runOnCallingThread */, 700 false /* canCallJava */, 701 PRIORITY_AUDIO); 702 703 sp<ALooper> codecLooper = new ALooper; 704 codecLooper->setName("codec_looper"); 705 706 codecLooper->start( 707 false /* runOnCallingThread */, 708 false /* canCallJava */, 709 PRIORITY_AUDIO); 710 711 size_t trackIndex; 712 713 sp<AMessage> notify; 714 715 trackIndex = mTracks.size(); 716 717 sp<AMessage> format; 718 status_t err = convertMetaDataToMessage(source->getFormat(), &format); 719 CHECK_EQ(err, (status_t)OK); 720 721 if (isVideo) { 722 format->setInt32("store-metadata-in-buffers", true); 723 724 format->setInt32( 725 "color-format", OMX_COLOR_FormatAndroidOpaque); 726 } 727 728 notify = new AMessage(kWhatConverterNotify, id()); 729 notify->setSize("trackIndex", trackIndex); 730 731 sp<Converter> converter = 732 new Converter(notify, codecLooper, format, usePCMAudio); 733 734 err = converter->initCheck(); 735 if (err != OK) { 736 ALOGE("%s converter returned err %d", isVideo ? "video" : "audio", err); 737 return err; 738 } 739 740 looper()->registerHandler(converter); 741 742 notify = new AMessage(Converter::kWhatMediaPullerNotify, converter->id()); 743 notify->setSize("trackIndex", trackIndex); 744 745 sp<MediaPuller> puller = new MediaPuller(source, notify); 746 pullLooper->registerHandler(puller); 747 748 if (numInputBuffers != NULL) { 749 *numInputBuffers = converter->getInputBufferCount(); 750 } 751 752 notify = new AMessage(kWhatTrackNotify, id()); 753 notify->setSize("trackIndex", trackIndex); 754 755 sp<Track> track = new Track( 756 notify, pullLooper, codecLooper, puller, converter); 757 758 if (isRepeaterSource) { 759 track->setRepeaterSource(static_cast<RepeaterSource *>(source.get())); 760 } 761 762 looper()->registerHandler(track); 763 764 mTracks.add(trackIndex, track); 765 766 if (isVideo) { 767 mVideoTrackIndex = trackIndex; 768 } 769 770 return OK; 771} 772 773status_t WifiDisplaySource::PlaybackSession::addVideoSource( 774 VideoFormats::ResolutionType videoResolutionType, 775 size_t videoResolutionIndex) { 776 size_t width, height, framesPerSecond; 777 bool interlaced; 778 CHECK(VideoFormats::GetConfiguration( 779 videoResolutionType, 780 videoResolutionIndex, 781 &width, 782 &height, 783 &framesPerSecond, 784 &interlaced)); 785 786 sp<SurfaceMediaSource> source = new SurfaceMediaSource(width, height); 787 788 source->setUseAbsoluteTimestamps(); 789 790 sp<RepeaterSource> videoSource = 791 new RepeaterSource(source, framesPerSecond); 792 793 size_t numInputBuffers; 794 status_t err = addSource( 795 true /* isVideo */, videoSource, true /* isRepeaterSource */, 796 false /* usePCMAudio */, &numInputBuffers); 797 798 if (err != OK) { 799 return err; 800 } 801 802 err = source->setMaxAcquiredBufferCount(numInputBuffers); 803 CHECK_EQ(err, (status_t)OK); 804 805 mBufferQueue = source->getBufferQueue(); 806 807 return OK; 808} 809 810status_t WifiDisplaySource::PlaybackSession::addAudioSource(bool usePCMAudio) { 811 sp<AudioSource> audioSource = new AudioSource( 812 AUDIO_SOURCE_REMOTE_SUBMIX, 813 48000 /* sampleRate */, 814 2 /* channelCount */); 815 816 if (audioSource->initCheck() == OK) { 817 return addSource( 818 false /* isVideo */, audioSource, false /* isRepeaterSource */, 819 usePCMAudio, NULL /* numInputBuffers */); 820 } 821 822 ALOGW("Unable to instantiate audio source"); 823 824 return OK; 825} 826 827sp<IGraphicBufferProducer> WifiDisplaySource::PlaybackSession::getSurfaceTexture() { 828 return mBufferQueue; 829} 830 831void WifiDisplaySource::PlaybackSession::requestIDRFrame() { 832 for (size_t i = 0; i < mTracks.size(); ++i) { 833 const sp<Track> &track = mTracks.valueAt(i); 834 835 track->requestIDRFrame(); 836 } 837} 838 839bool WifiDisplaySource::PlaybackSession::allTracksHavePacketizerIndex() { 840 if (mAllTracksHavePacketizerIndex) { 841 return true; 842 } 843 844 for (size_t i = 0; i < mTracks.size(); ++i) { 845 if (mTracks.valueAt(i)->packetizerTrackIndex() < 0) { 846 return false; 847 } 848 } 849 850 mAllTracksHavePacketizerIndex = true; 851 852 return true; 853} 854 855status_t WifiDisplaySource::PlaybackSession::packetizeAccessUnit( 856 size_t trackIndex, sp<ABuffer> accessUnit, 857 sp<ABuffer> *packets) { 858 const sp<Track> &track = mTracks.valueFor(trackIndex); 859 860 uint32_t flags = 0; 861 862 bool isHDCPEncrypted = false; 863 uint64_t inputCTR; 864 uint8_t HDCP_private_data[16]; 865 866 bool manuallyPrependSPSPPS = 867 !track->isAudio() 868 && track->converter()->needToManuallyPrependSPSPPS() 869 && IsIDR(accessUnit); 870 871 if (mHDCP != NULL && !track->isAudio()) { 872 isHDCPEncrypted = true; 873 874 if (manuallyPrependSPSPPS) { 875 accessUnit = mPacketizer->prependCSD( 876 track->packetizerTrackIndex(), accessUnit); 877 } 878 879 status_t err = mHDCP->encrypt( 880 accessUnit->data(), accessUnit->size(), 881 trackIndex /* streamCTR */, 882 &inputCTR, 883 accessUnit->data()); 884 885 if (err != OK) { 886 ALOGE("Failed to HDCP-encrypt media data (err %d)", 887 err); 888 889 return err; 890 } 891 892 HDCP_private_data[0] = 0x00; 893 894 HDCP_private_data[1] = 895 (((trackIndex >> 30) & 3) << 1) | 1; 896 897 HDCP_private_data[2] = (trackIndex >> 22) & 0xff; 898 899 HDCP_private_data[3] = 900 (((trackIndex >> 15) & 0x7f) << 1) | 1; 901 902 HDCP_private_data[4] = (trackIndex >> 7) & 0xff; 903 904 HDCP_private_data[5] = 905 ((trackIndex & 0x7f) << 1) | 1; 906 907 HDCP_private_data[6] = 0x00; 908 909 HDCP_private_data[7] = 910 (((inputCTR >> 60) & 0x0f) << 1) | 1; 911 912 HDCP_private_data[8] = (inputCTR >> 52) & 0xff; 913 914 HDCP_private_data[9] = 915 (((inputCTR >> 45) & 0x7f) << 1) | 1; 916 917 HDCP_private_data[10] = (inputCTR >> 37) & 0xff; 918 919 HDCP_private_data[11] = 920 (((inputCTR >> 30) & 0x7f) << 1) | 1; 921 922 HDCP_private_data[12] = (inputCTR >> 22) & 0xff; 923 924 HDCP_private_data[13] = 925 (((inputCTR >> 15) & 0x7f) << 1) | 1; 926 927 HDCP_private_data[14] = (inputCTR >> 7) & 0xff; 928 929 HDCP_private_data[15] = 930 ((inputCTR & 0x7f) << 1) | 1; 931 932#if 0 933 ALOGI("HDCP_private_data:"); 934 hexdump(HDCP_private_data, sizeof(HDCP_private_data)); 935 936 ABitReader br(HDCP_private_data, sizeof(HDCP_private_data)); 937 CHECK_EQ(br.getBits(13), 0); 938 CHECK_EQ(br.getBits(2), (trackIndex >> 30) & 3); 939 CHECK_EQ(br.getBits(1), 1u); 940 CHECK_EQ(br.getBits(15), (trackIndex >> 15) & 0x7fff); 941 CHECK_EQ(br.getBits(1), 1u); 942 CHECK_EQ(br.getBits(15), trackIndex & 0x7fff); 943 CHECK_EQ(br.getBits(1), 1u); 944 CHECK_EQ(br.getBits(11), 0); 945 CHECK_EQ(br.getBits(4), (inputCTR >> 60) & 0xf); 946 CHECK_EQ(br.getBits(1), 1u); 947 CHECK_EQ(br.getBits(15), (inputCTR >> 45) & 0x7fff); 948 CHECK_EQ(br.getBits(1), 1u); 949 CHECK_EQ(br.getBits(15), (inputCTR >> 30) & 0x7fff); 950 CHECK_EQ(br.getBits(1), 1u); 951 CHECK_EQ(br.getBits(15), (inputCTR >> 15) & 0x7fff); 952 CHECK_EQ(br.getBits(1), 1u); 953 CHECK_EQ(br.getBits(15), inputCTR & 0x7fff); 954 CHECK_EQ(br.getBits(1), 1u); 955#endif 956 957 flags |= TSPacketizer::IS_ENCRYPTED; 958 } else if (manuallyPrependSPSPPS) { 959 flags |= TSPacketizer::PREPEND_SPS_PPS_TO_IDR_FRAMES; 960 } 961 962 int64_t timeUs = ALooper::GetNowUs(); 963 if (mPrevTimeUs < 0ll || mPrevTimeUs + 100000ll <= timeUs) { 964 flags |= TSPacketizer::EMIT_PCR; 965 flags |= TSPacketizer::EMIT_PAT_AND_PMT; 966 967 mPrevTimeUs = timeUs; 968 } 969 970 mPacketizer->packetize( 971 track->packetizerTrackIndex(), accessUnit, packets, flags, 972 !isHDCPEncrypted ? NULL : HDCP_private_data, 973 !isHDCPEncrypted ? 0 : sizeof(HDCP_private_data), 974 track->isAudio() ? 2 : 0 /* numStuffingBytes */); 975 976 return OK; 977} 978 979status_t WifiDisplaySource::PlaybackSession::packetizeQueuedAccessUnits() { 980 for (;;) { 981 bool gotMoreData = false; 982 for (size_t i = 0; i < mTracks.size(); ++i) { 983 size_t trackIndex = mTracks.keyAt(i); 984 const sp<Track> &track = mTracks.valueAt(i); 985 986 sp<ABuffer> accessUnit = track->dequeueAccessUnit(); 987 if (accessUnit != NULL) { 988 track->queueOutputBuffer(accessUnit); 989 gotMoreData = true; 990 } 991 } 992 993 if (!gotMoreData) { 994 break; 995 } 996 } 997 998 return OK; 999} 1000 1001void WifiDisplaySource::PlaybackSession::notifySessionDead() { 1002 // Inform WifiDisplaySource of our premature death (wish). 1003 sp<AMessage> notify = mNotify->dup(); 1004 notify->setInt32("what", kWhatSessionDead); 1005 notify->post(); 1006 1007 mWeAreDead = true; 1008} 1009 1010void WifiDisplaySource::PlaybackSession::drainAccessUnits() { 1011 ALOGV("audio/video has %d/%d buffers ready.", 1012 mTracks.valueFor(1)->countQueuedOutputBuffers(), 1013 mTracks.valueFor(0)->countQueuedOutputBuffers()); 1014 1015 while (drainAccessUnit()) { 1016 } 1017} 1018 1019bool WifiDisplaySource::PlaybackSession::drainAccessUnit() { 1020 ssize_t minTrackIndex = -1; 1021 int64_t minTimeUs = -1ll; 1022 1023 for (size_t i = 0; i < mTracks.size(); ++i) { 1024 const sp<Track> &track = mTracks.valueAt(i); 1025 1026 int64_t timeUs; 1027 if (track->hasOutputBuffer(&timeUs)) { 1028 if (minTrackIndex < 0 || timeUs < minTimeUs) { 1029 minTrackIndex = mTracks.keyAt(i); 1030 minTimeUs = timeUs; 1031 } 1032 } 1033#if SUSPEND_VIDEO_IF_IDLE 1034 else if (!track->isSuspended()) { 1035 // We still consider this track "live", so it should keep 1036 // delivering output data whose time stamps we'll have to 1037 // consider for proper interleaving. 1038 return false; 1039 } 1040#else 1041 else { 1042 // We need access units available on all tracks to be able to 1043 // dequeue the earliest one. 1044 return false; 1045 } 1046#endif 1047 } 1048 1049 if (minTrackIndex < 0) { 1050 return false; 1051 } 1052 1053 const sp<Track> &track = mTracks.valueFor(minTrackIndex); 1054 sp<ABuffer> accessUnit = track->dequeueOutputBuffer(); 1055 1056 sp<ABuffer> packets; 1057 status_t err = packetizeAccessUnit(minTrackIndex, accessUnit, &packets); 1058 1059 if (err != OK) { 1060 notifySessionDead(); 1061 return false; 1062 } 1063 1064 if ((ssize_t)minTrackIndex == mVideoTrackIndex) { 1065 packets->meta()->setInt32("isVideo", 1); 1066 } 1067 mSender->queuePackets(minTimeUs, packets); 1068 1069#if 0 1070 if (minTrackIndex == mVideoTrackIndex) { 1071 int64_t nowUs = ALooper::GetNowUs(); 1072 1073 // Latency from "data acquired" to "ready to send if we wanted to". 1074 ALOGI("[%s] latencyUs = %lld ms", 1075 minTrackIndex == mVideoTrackIndex ? "video" : "audio", 1076 (nowUs - minTimeUs) / 1000ll); 1077 } 1078#endif 1079 1080 return true; 1081} 1082 1083} // namespace android 1084 1085