NuPlayer.cpp revision a298101317e5472f6b6a12e6ddeafdc4064bd5b2
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayer"
#include <utils/Log.h>

#include "NuPlayer.h"

#include "HTTPLiveSource.h"
#include "NuPlayerDecoder.h"
#include "NuPlayerDecoderPassThrough.h"
#include "NuPlayerDriver.h"
#include "NuPlayerRenderer.h"
#include "NuPlayerSource.h"
#include "RTSPSource.h"
#include "StreamingSource.h"
#include "GenericSource.h"
#include "TextDescriptions.h"

#include "ATSParser.h"

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <gui/IGraphicBufferProducer.h>

#include "avc_utils.h"

#include "ESDS.h"
#include <media/stagefright/Utils.h>

namespace android {

// Base class for operations that must be deferred until the player reaches
// a safe state (see processDeferredActions / mDeferredActions): each Action
// is queued and later executed against the player instance.
struct NuPlayer::Action : public RefBase {
    Action() {}

    // Performs the deferred operation on |player|.
    virtual void execute(NuPlayer *player) = 0;

private:
    DISALLOW_EVIL_CONSTRUCTORS(Action);
};

// Deferred seek to an absolute media time (microseconds).
struct NuPlayer::SeekAction : public Action {
    SeekAction(int64_t seekTimeUs)
        : mSeekTimeUs(seekTimeUs) {
    }

    virtual void execute(NuPlayer *player) {
        player->performSeek(mSeekTimeUs);
    }

private:
    int64_t mSeekTimeUs;

    DISALLOW_EVIL_CONSTRUCTORS(SeekAction);
};

// Deferred switch of the video output to a new native window.
struct NuPlayer::SetSurfaceAction : public Action {
    SetSurfaceAction(const sp<NativeWindowWrapper> &wrapper)
        : mWrapper(wrapper) {
    }

    virtual void execute(NuPlayer *player) {
        player->performSetSurface(mWrapper);
    }

private:
    sp<NativeWindowWrapper> mWrapper;

    DISALLOW_EVIL_CONSTRUCTORS(SetSurfaceAction);
};

// Deferred shutdown of the audio and/or video decoder.
struct NuPlayer::ShutdownDecoderAction : public Action {
    ShutdownDecoderAction(bool audio, bool video)
        : mAudio(audio),
          mVideo(video) {
    }

    virtual void execute(NuPlayer *player) {
        player->performDecoderShutdown(mAudio, mVideo);
    }

private:
    bool mAudio;
    bool mVideo;

    DISALLOW_EVIL_CONSTRUCTORS(ShutdownDecoderAction);
};

// Deferred (re-)posting of an arbitrary AMessage.
struct NuPlayer::PostMessageAction : public Action {
    PostMessageAction(const sp<AMessage> &msg)
        : mMessage(msg) {
    }

    virtual void execute(NuPlayer *) {
        mMessage->post();
    }

private:
    sp<AMessage> mMessage;

    DISALLOW_EVIL_CONSTRUCTORS(PostMessageAction);
};

// Use this if there's no state necessary to save in order to execute
// the action.
// Deferred invocation of a no-argument NuPlayer member function.
struct NuPlayer::SimpleAction : public Action {
    typedef void (NuPlayer::*ActionFunc)();

    SimpleAction(ActionFunc func)
        : mFunc(func) {
    }

    virtual void execute(NuPlayer *player) {
        (player->*mFunc)();
    }

private:
    ActionFunc mFunc;

    DISALLOW_EVIL_CONSTRUCTORS(SimpleAction);
};

////////////////////////////////////////////////////////////////////////////////

NuPlayer::NuPlayer()
    : mUIDValid(false),
      mSourceFlags(0),
      mVideoIsAVC(false),
      mOffloadAudio(false),
      mAudioEOS(false),
      mVideoEOS(false),
      mScanSourcesPending(false),
      mScanSourcesGeneration(0),
      mPollDurationGeneration(0),
      mTimedTextGeneration(0),
      mTimeDiscontinuityPending(false),
      mFlushingAudio(NONE),
      mFlushingVideo(NONE),
      mSkipRenderingAudioUntilMediaTimeUs(-1ll),
      mSkipRenderingVideoUntilMediaTimeUs(-1ll),
      mVideoLateByUs(0ll),
      mNumFramesTotal(0ll),
      mNumFramesDropped(0ll),
      mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW),
      mStarted(false) {
}

NuPlayer::~NuPlayer() {
}

// Records the caller's uid so sources (e.g. RTSP) can attribute network
// traffic; mUIDValid gates whether mUID is meaningful.
void NuPlayer::setUID(uid_t uid) {
    mUIDValid = true;
    mUID = uid;
}

void NuPlayer::setDriver(const wp<NuPlayerDriver> &driver) {
    mDriver = driver;
}

// Asynchronously installs a streaming (IStreamSource-backed) data source;
// completion is reported via kWhatSetDataSource handling.
void NuPlayer::setDataSourceAsync(const sp<IStreamSource> &source) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    sp<AMessage> notify = new AMessage(kWhatSourceNotify, id());

    msg->setObject("source", new StreamingSource(notify, source));
    msg->post();
}

// Heuristic: treat http/https/file URLs ending in ".m3u8" -- or containing
// "m3u8" anywhere in the URL -- as HTTP Live Streaming playlists.
static bool IsHTTPLiveURL(const char *url) {
    if (!strncasecmp("http://", url, 7)
            || !strncasecmp("https://", url, 8)
            || !strncasecmp("file://", url, 7)) {
        size_t len = strlen(url);
        if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
            return true;
        }

        // NOTE(review): this substring match is deliberately broad -- any
        // URL merely containing "m3u8" is classified as HLS.
        if (strstr(url,"m3u8")) {
            return true;
        }
    }

    return false;
}

// Asynchronously installs a URL-based data source, dispatching on the URL
// scheme/suffix to HLS, RTSP, SDP-over-http, Widevine or generic sources.
void
NuPlayer::setDataSourceAsync(
        const sp<IMediaHTTPService> &httpService,
        const char *url,
        const KeyedVector<String8, String8> *headers) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
    size_t len = strlen(url);

    sp<AMessage> notify = new AMessage(kWhatSourceNotify, id());

    sp<Source> source;
    if (IsHTTPLiveURL(url)) {
        source = new HTTPLiveSource(notify, httpService, url, headers);
    } else if (!strncasecmp(url, "rtsp://", 7)) {
        source = new RTSPSource(
                notify, httpService, url, headers, mUIDValid, mUID);
    } else if ((!strncasecmp(url, "http://", 7)
                    || !strncasecmp(url, "https://", 8))
                && ((len >= 4 && !strcasecmp(".sdp", &url[len - 4]))
                    || strstr(url, ".sdp?"))) {
        // http(s) URL pointing at an SDP description: RTSP in SDP mode.
        source = new RTSPSource(
                notify, httpService, url, headers, mUIDValid, mUID, true);
    } else if ((!strncasecmp(url, "widevine://", 11))) {
        // Widevine DRM content is handled by GenericSource in secure mode.
        source = new GenericSource(notify, httpService, url, headers,
                true /* isWidevine */, mUIDValid, mUID);
        mSourceFlags |= Source::FLAG_SECURE;
    } else {
        source = new GenericSource(notify, httpService, url, headers);
    }

    msg->setObject("source", source);
    msg->post();
}

// Asynchronously installs a file-descriptor-based data source.
void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    sp<AMessage> notify = new AMessage(kWhatSourceNotify, id());

    sp<Source> source = new GenericSource(notify, fd, offset, length);
    msg->setObject("source", source);
    msg->post();
}

void NuPlayer::prepareAsync() {
    (new AMessage(kWhatPrepare, id()))->post();
}

// Asynchronously installs the video output surface; a NULL producer clears
// the native window.
void NuPlayer::setVideoSurfaceTextureAsync(
        const sp<IGraphicBufferProducer> &bufferProducer) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id());

    if (bufferProducer == NULL) {
        msg->setObject("native-window", NULL);
    } else {
        msg->setObject(
                "native-window",
                new NativeWindowWrapper(
                        new
Surface(bufferProducer)));
    }

    msg->post();
}

void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) {
    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id());
    msg->setObject("sink", sink);
    msg->post();
}

void NuPlayer::start() {
    (new AMessage(kWhatStart, id()))->post();
}

void NuPlayer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::resetAsync() {
    (new AMessage(kWhatReset, id()))->post();
}

void NuPlayer::seekToAsync(int64_t seekTimeUs) {
    sp<AMessage> msg = new AMessage(kWhatSeek, id());
    msg->setInt64("seekTimeUs", seekTimeUs);
    msg->post();
}

// static
// Returns true if |state| is one of the two active flushing states.
// When true and |needShutdown| is non-NULL, *needShutdown reports whether
// the decoder must additionally be shut down once the flush completes.
bool NuPlayer::IsFlushingState(FlushStatus state, bool *needShutdown) {
    switch (state) {
        case FLUSHING_DECODER:
            if (needShutdown != NULL) {
                *needShutdown = false;
            }
            return true;

        case FLUSHING_DECODER_SHUTDOWN:
            if (needShutdown != NULL) {
                *needShutdown = true;
            }
            return true;

        default:
            return false;
    }
}

// Serializes one track's info (type, language, and for subtitles the mime
// plus auto/default/forced flags) into |reply| for the Java layer.
void NuPlayer::writeTrackInfo(
        Parcel* reply, const sp<AMessage> format) const {
    int32_t trackType;
    CHECK(format->findInt32("type", &trackType));

    AString lang;
    CHECK(format->findString("language", &lang));

    reply->writeInt32(2); // write something non-zero
    reply->writeInt32(trackType);
    reply->writeString16(String16(lang.c_str()));

    if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) {
        AString mime;
        CHECK(format->findString("mime", &mime));

        int32_t isAuto, isDefault, isForced;
        CHECK(format->findInt32("auto", &isAuto));
        CHECK(format->findInt32("default", &isDefault));
        CHECK(format->findInt32("forced", &isForced));

        reply->writeString16(String16(mime.c_str()));
        reply->writeInt32(isAuto);
        reply->writeInt32(isDefault);
        reply->writeInt32(isForced);
    }
}

// Central message dispatcher: all public async entry points funnel into
// this handler on the player's looper thread.
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetDataSource:
        {
            ALOGV("kWhatSetDataSource");

            // A data source may only be set once per player instance.
            CHECK(mSource == NULL);

            sp<RefBase> obj;
            CHECK(msg->findObject("source", &obj));

            mSource = static_cast<Source *>(obj.get());

            looper()->registerHandler(mSource);

            CHECK(mDriver != NULL);
            sp<NuPlayerDriver> driver = mDriver.promote();
            if (driver != NULL) {
                driver->notifySetDataSourceCompleted(OK);
            }
            break;
        }

        case kWhatPrepare:
        {
            // NOTE(review): assumes setDataSource completed first -- mSource
            // is not null-checked here.
            mSource->prepareAsync();
            break;
        }

        case kWhatGetTrackInfo:
        {
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            Parcel* reply;
            CHECK(msg->findPointer("reply", (void**)&reply));

            size_t inbandTracks = 0;
            if (mSource != NULL) {
                inbandTracks = mSource->getTrackCount();
            }

            size_t ccTracks = 0;
            if (mCCDecoder != NULL) {
                ccTracks = mCCDecoder->getTrackCount();
            }

            // total track count
            reply->writeInt32(inbandTracks + ccTracks);

            // write inband tracks
            for (size_t i = 0; i < inbandTracks; ++i) {
                writeTrackInfo(reply, mSource->getTrackInfo(i));
            }

            // write CC track
            for (size_t i = 0; i < ccTracks; ++i) {
                writeTrackInfo(reply, mCCDecoder->getTrackInfo(i));
            }

            sp<AMessage> response = new AMessage;
            response->postReply(replyID);
            break;
        }

        case kWhatSelectTrack:
        {
            uint32_t replyID;
            CHECK(msg->senderAwaitsResponse(&replyID));

            size_t trackIndex;
            int32_t select;
            CHECK(msg->findSize("trackIndex", &trackIndex));
            CHECK(msg->findInt32("select", &select));

            status_t err = INVALID_OPERATION;

            // Track indices are laid out as [inband tracks][CC tracks].
            size_t inbandTracks = 0;
            if (mSource != NULL) {
                inbandTracks = mSource->getTrackCount();
            }
            size_t ccTracks = 0;
            if (mCCDecoder != NULL) {
                ccTracks =
mCCDecoder->getTrackCount();
            }

            if (trackIndex < inbandTracks) {
                err = mSource->selectTrack(trackIndex, select);

                // Deselecting a timed-text track bumps the generation so
                // stale timed-text messages get dropped.
                if (!select && err == OK) {
                    int32_t type;
                    sp<AMessage> info = mSource->getTrackInfo(trackIndex);
                    if (info != NULL
                            && info->findInt32("type", &type)
                            && type == MEDIA_TRACK_TYPE_TIMEDTEXT) {
                        ++mTimedTextGeneration;
                    }
                }
            } else {
                trackIndex -= inbandTracks;

                if (trackIndex < ccTracks) {
                    err = mCCDecoder->selectTrack(trackIndex, select);
                }
            }

            sp<AMessage> response = new AMessage;
            response->setInt32("err", err);

            response->postReply(replyID);
            break;
        }

        case kWhatPollDuration:
        {
            // Generation check: schedulePollDuration/cancelPollDuration bump
            // mPollDurationGeneration to invalidate in-flight polls.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));

            if (generation != mPollDurationGeneration) {
                // stale
                break;
            }

            int64_t durationUs;
            if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
                sp<NuPlayerDriver> driver = mDriver.promote();
                if (driver != NULL) {
                    driver->notifyDuration(durationUs);
                }
            }

            msg->post(1000000ll);  // poll again in a second.
            break;
        }

        case kWhatSetVideoNativeWindow:
        {
            ALOGV("kWhatSetVideoNativeWindow");

            // Replacing the surface requires tearing down the video decoder
            // first; both steps run as deferred actions.
            mDeferredActions.push_back(
                    new ShutdownDecoderAction(
                        false /* audio */, true /* video */));

            sp<RefBase> obj;
            CHECK(msg->findObject("native-window", &obj));

            mDeferredActions.push_back(
                    new SetSurfaceAction(
                        static_cast<NativeWindowWrapper *>(obj.get())));

            if (obj != NULL) {
                // If there is a new surface texture, instantiate decoders
                // again if possible.
                mDeferredActions.push_back(
                        new SimpleAction(&NuPlayer::performScanSources));
            }

            processDeferredActions();
            break;
        }

        case kWhatSetAudioSink:
        {
            ALOGV("kWhatSetAudioSink");

            sp<RefBase> obj;
            CHECK(msg->findObject("sink", &obj));

            mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get());
            break;
        }

        case kWhatStart:
        {
            ALOGV("kWhatStart");

            // Reset per-playback state before starting the source.
            mVideoIsAVC = false;
            mOffloadAudio = false;
            mAudioEOS = false;
            mVideoEOS = false;
            mSkipRenderingAudioUntilMediaTimeUs = -1;
            mSkipRenderingVideoUntilMediaTimeUs = -1;
            mVideoLateByUs = 0;
            mNumFramesTotal = 0;
            mNumFramesDropped = 0;
            mStarted = true;

            /* instantiate decoders now for secure playback */
            if (mSourceFlags & Source::FLAG_SECURE) {
                if (mNativeWindow != NULL) {
                    instantiateDecoder(false, &mVideoDecoder);
                }

                if (mAudioSink != NULL) {
                    instantiateDecoder(true, &mAudioDecoder);
                }
            }

            mSource->start();

            uint32_t flags = 0;

            if (mSource->isRealTime()) {
                flags |= Renderer::FLAG_REAL_TIME;
            }

            // Decide whether the audio track can be offloaded to hardware.
            sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */);
            audio_stream_type_t streamType = AUDIO_STREAM_MUSIC;
            if (mAudioSink != NULL) {
                streamType = mAudioSink->getAudioStreamType();
            }

            sp<AMessage> videoFormat = mSource->getFormat(false /* audio */);

            mOffloadAudio =
                canOffloadStream(audioMeta, (videoFormat != NULL),
                                 true /* is_streaming */, streamType);
            if (mOffloadAudio) {
                flags |= Renderer::FLAG_OFFLOAD_AUDIO;
            }

            // The renderer runs on its own looper at audio priority.
            mRenderer = new Renderer(
                    mAudioSink,
                    new AMessage(kWhatRendererNotify, id()),
                    flags);

            mRendererLooper = new ALooper;
            mRendererLooper->setName("NuPlayerRenderer");
            mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
            mRendererLooper->registerHandler(mRenderer);

            postScanSources();
            break;
        }

        case
kWhatScanSources:
        {
            // Generation check drops scan messages queued before a reset.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mScanSourcesGeneration) {
                // Drop obsolete msg.
                break;
            }

            mScanSourcesPending = false;

            ALOGV("scanning sources haveAudio=%d, haveVideo=%d",
                 mAudioDecoder != NULL, mVideoDecoder != NULL);

            // Fix: this is a function-local, not a member -- it previously
            // carried the member-style 'm' prefix (mHadAnySourcesBefore),
            // which contradicts the naming convention used everywhere else
            // in this file.
            bool hadAnySourcesBefore =
                (mAudioDecoder != NULL) || (mVideoDecoder != NULL);

            // Try to bring up whichever decoders are still missing.
            if (mNativeWindow != NULL) {
                instantiateDecoder(false, &mVideoDecoder);
            }

            if (mAudioSink != NULL) {
                instantiateDecoder(true, &mAudioDecoder);
            }

            if (!hadAnySourcesBefore
                    && (mAudioDecoder != NULL || mVideoDecoder != NULL)) {
                // This is the first time we've found anything playable.

                if (mSourceFlags & Source::FLAG_DYNAMIC_DURATION) {
                    schedulePollDuration();
                }
            }

            status_t err;
            if ((err = mSource->feedMoreTSData()) != OK) {
                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                    // We're not currently decoding anything (no audio or
                    // video tracks found) and we just ran out of input data.

                    if (err == ERROR_END_OF_STREAM) {
                        notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                    } else {
                        notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
                    }
                }
                break;
            }

            // Keep rescanning (every 100ms) while a decoder we could use is
            // still missing.
            if ((mAudioDecoder == NULL && mAudioSink != NULL)
                    || (mVideoDecoder == NULL && mNativeWindow != NULL)) {
                msg->post(100000ll);
                mScanSourcesPending = true;
            }
            break;
        }

        // Notifications coming back from the audio/video Decoder instances.
        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            bool audio = msg->what() == kWhatAudioNotify;

            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (what == Decoder::kWhatFillThisBuffer) {
                status_t err = feedDecoderInputData(
                        audio, msg);

                if (err == -EWOULDBLOCK) {
                    // No input available yet; retry in 10ms if the source
                    // can still produce data.
                    if (mSource->feedMoreTSData() == OK) {
                        msg->post(10000ll);
                    }
                }
            } else if (what == Decoder::kWhatEOS) {
                int32_t err;
                CHECK(msg->findInt32("err", &err));

                if (err == ERROR_END_OF_STREAM) {
                    ALOGV("got %s decoder EOS", audio ? "audio" : "video");
                } else {
                    ALOGV("got %s decoder EOS w/ error %d",
                         audio ? "audio" : "video",
                         err);
                }

                mRenderer->queueEOS(audio, err);
            } else if (what == Decoder::kWhatFlushCompleted) {
                // needShutdown is set by IsFlushingState (the CHECK
                // guarantees we were in a flushing state).
                bool needShutdown;

                if (audio) {
                    CHECK(IsFlushingState(mFlushingAudio, &needShutdown));
                    mFlushingAudio = FLUSHED;
                } else {
                    CHECK(IsFlushingState(mFlushingVideo, &needShutdown));
                    mFlushingVideo = FLUSHED;

                    mVideoLateByUs = 0;
                }

                ALOGV("decoder %s flush completed", audio ? "audio" : "video");

                if (needShutdown) {
                    ALOGV("initiating %s decoder shutdown",
                         audio ? "audio" : "video");

                    (audio ?
mAudioDecoder : mVideoDecoder)->initiateShutdown();

                    if (audio) {
                        mFlushingAudio = SHUTTING_DOWN_DECODER;
                    } else {
                        mFlushingVideo = SHUTTING_DOWN_DECODER;
                    }
                }

                finishFlushIfPossible();
            } else if (what == Decoder::kWhatOutputFormatChanged) {
                sp<AMessage> format;
                CHECK(msg->findMessage("format", &format));

                if (audio) {
                    // Audio output format changed: reopen the audio sink
                    // with the new parameters (offloaded if possible).
                    int32_t numChannels;
                    CHECK(format->findInt32(
                                "channel-count", &numChannels));

                    int32_t sampleRate;
                    CHECK(format->findInt32("sample-rate", &sampleRate));

                    ALOGV("Audio output format changed to %d Hz, %d channels",
                         sampleRate, numChannels);

                    mAudioSink->close();

                    uint32_t flags;
                    int64_t durationUs;
                    // FIXME: we should handle the case where the video decoder
                    // is created after we receive the format change indication.
                    // Current code will just make that we select deep buffer
                    // with video which should not be a problem as it should
                    // not prevent from keeping A/V sync.
                    // Audio-only long content gets the deep-buffer output
                    // path; everything else uses the default output flags.
                    if (mVideoDecoder == NULL &&
                            mSource->getDuration(&durationUs) == OK &&
                            durationUs
                                > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
                        flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
                    } else {
                        flags = AUDIO_OUTPUT_FLAG_NONE;
                    }

                    int32_t channelMask;
                    if (!format->findInt32("channel-mask", &channelMask)) {
                        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
                    }

                    if (mOffloadAudio) {
                        audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT;
                        audio_offload_info_t offloadInfo =
                                AUDIO_INFO_INITIALIZER;

                        AString mime;
                        CHECK(format->findString("mime", &mime));

                        status_t err =
                            mapMimeToAudioFormat(audioFormat, mime.c_str());
                        if (err != OK) {
                            // No hardware format for this mime: give up on
                            // offload for this stream.
                            ALOGE("Couldn't map mime \"%s\" to a valid "
                                    "audio_format", mime.c_str());
                            mOffloadAudio = false;
                        } else {
                            ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
                                    mime.c_str(), audioFormat);

                            flags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;

                            // duration/bit-rate are optional hints; -1 when
                            // the format doesn't carry them.
                            offloadInfo.duration_us = -1;
                            format->findInt64(
                                    "durationUs", &offloadInfo.duration_us);

                            int avgBitRate = -1;
                            format->findInt32("bit-rate", &avgBitRate);

                            offloadInfo.sample_rate = sampleRate;
                            offloadInfo.channel_mask = channelMask;
                            offloadInfo.format = audioFormat;
                            offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
                            offloadInfo.bit_rate = avgBitRate;
                            offloadInfo.has_video = (mVideoDecoder != NULL);
                            offloadInfo.is_streaming = true;

                            ALOGV("try to open AudioSink in offload mode");
                            err = mAudioSink->open(
                                    sampleRate,
                                    numChannels,
                                    (audio_channel_mask_t)channelMask,
                                    audioFormat,
                                    8 /* bufferCount */,
                                    &NuPlayer::Renderer::AudioSinkCallback,
                                    mRenderer.get(),
                                    (audio_output_flags_t)flags,
                                    &offloadInfo);

                            if (err == OK) {
                                // If the playback is offloaded to h/w, we pass
                                // the HAL some metadata information.
                                // We don't want to do this for PCM because it
                                // will be going through the AudioFlinger mixer
                                // before reaching the hardware.
                                sp<MetaData> audioMeta =
                                    mSource->getFormatMeta(true /* audio */);
                                sendMetaDataToHal(mAudioSink, audioMeta);

                                err = mAudioSink->start();
                            }
                        }

                        if (err != OK) {
                            // Clean up, fall back to non offload mode.
                            mAudioSink->close();
                            mAudioDecoder.clear();
                            mRenderer->signalDisableOffloadAudio();
                            mOffloadAudio = false;

                            instantiateDecoder(
                                    true /* audio */, &mAudioDecoder);
                        }
                    }

                    if (!mOffloadAudio) {
                        // PCM path: open the sink without offload and with
                        // no callback (the renderer pushes buffers).
                        flags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
                        ALOGV("open AudioSink in NON-offload mode");
                        CHECK_EQ(mAudioSink->open(
                                    sampleRate,
                                    numChannels,
                                    (audio_channel_mask_t)channelMask,
                                    AUDIO_FORMAT_PCM_16_BIT,
                                    8 /* bufferCount */,
                                    NULL,
                                    NULL,
                                    (audio_output_flags_t)flags),
                                 (status_t)OK);
                        mAudioSink->start();
                    }

                    mRenderer->signalAudioSinkChanged();
                } else {
                    // video

                    int32_t width, height;
                    CHECK(format->findInt32("width", &width));
                    CHECK(format->findInt32("height", &height));

                    int32_t cropLeft, cropTop, cropRight, cropBottom;
                    CHECK(format->findRect(
                                "crop",
                                &cropLeft, &cropTop, &cropRight, &cropBottom));

                    // Crop rect is inclusive on both edges.
                    int32_t displayWidth = cropRight - cropLeft + 1;
                    int32_t displayHeight = cropBottom - cropTop + 1;

                    ALOGV("Video output format changed to %d x %d "
                         "(crop: %d x %d @ (%d, %d))",
                         width, height,
                         displayWidth,
                         displayHeight,
                         cropLeft, cropTop);

                    sp<AMessage> videoInputFormat =
                        mSource->getFormat(false /* audio */);

                    // Take into account sample aspect ratio if necessary:
                    int32_t sarWidth, sarHeight;
                    if (videoInputFormat->findInt32("sar-width", &sarWidth)
                            && videoInputFormat->findInt32(
                                "sar-height", &sarHeight)) {
                        ALOGV("Sample aspect ratio %d : %d",
                                sarWidth, sarHeight);

                        displayWidth =
(displayWidth * sarWidth) / sarHeight;

                        ALOGV("display dimensions %d x %d",
                                displayWidth, displayHeight);
                    }

                    int32_t rotationDegrees;
                    if (!videoInputFormat->findInt32(
                            "rotation-degrees", &rotationDegrees)) {
                        rotationDegrees = 0;
                    }

                    // For 90/270 rotation the reported width/height are
                    // swapped so the app sees post-rotation dimensions.
                    if (rotationDegrees == 90 || rotationDegrees == 270) {
                        notifyListener(
                                MEDIA_SET_VIDEO_SIZE,
                                displayHeight,
                                displayWidth);
                    } else {
                        notifyListener(
                                MEDIA_SET_VIDEO_SIZE,
                                displayWidth,
                                displayHeight);
                    }
                }
            } else if (what == Decoder::kWhatShutdownCompleted) {
                ALOGV("%s shutdown completed", audio ? "audio" : "video");
                if (audio) {
                    mAudioDecoder.clear();

                    CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
                    mFlushingAudio = SHUT_DOWN;
                } else {
                    mVideoDecoder.clear();

                    CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
                    mFlushingVideo = SHUT_DOWN;
                }

                finishFlushIfPossible();
            } else if (what == Decoder::kWhatError) {
                ALOGE("Received error from %s decoder, aborting playback.",
                     audio ? "audio" : "video");

                mRenderer->queueEOS(audio, UNKNOWN_ERROR);
            } else if (what == Decoder::kWhatDrainThisBuffer) {
                renderBuffer(audio, msg);
            } else {
                ALOGV("Unhandled decoder notification %d '%c%c%c%c'.",
                      what,
                      what >> 24,
                      (what >> 16) & 0xff,
                      (what >> 8) & 0xff,
                      what & 0xff);
            }

            break;
        }

        // Notifications coming back from the Renderer.
        case kWhatRendererNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (what == Renderer::kWhatEOS) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                int32_t finalResult;
                CHECK(msg->findInt32("finalResult", &finalResult));

                if (audio) {
                    mAudioEOS = true;
                } else {
                    mVideoEOS = true;
                }

                if (finalResult == ERROR_END_OF_STREAM) {
                    ALOGV("reached %s EOS", audio ? "audio" : "video");
                } else {
                    ALOGE("%s track encountered an error (%d)",
                         audio ?
"audio" : "video", finalResult);

                    notifyListener(
                            MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, finalResult);
                }

                // Playback is complete once every instantiated decoder has
                // reached EOS.
                if ((mAudioEOS || mAudioDecoder == NULL)
                        && (mVideoEOS || mVideoDecoder == NULL)) {
                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                }
            } else if (what == Renderer::kWhatPosition) {
                int64_t positionUs;
                CHECK(msg->findInt64("positionUs", &positionUs));

                CHECK(msg->findInt64("videoLateByUs", &mVideoLateByUs));

                if (mDriver != NULL) {
                    sp<NuPlayerDriver> driver = mDriver.promote();
                    if (driver != NULL) {
                        driver->notifyPosition(positionUs);

                        driver->notifyFrameStats(
                                mNumFramesTotal, mNumFramesDropped);
                    }
                }
            } else if (what == Renderer::kWhatFlushComplete) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
            } else if (what == Renderer::kWhatVideoRenderingStart) {
                notifyListener(MEDIA_INFO, MEDIA_INFO_RENDERING_START, 0);
            } else if (what == Renderer::kWhatMediaRenderingStart) {
                ALOGV("media rendering started");
                notifyListener(MEDIA_STARTED, 0, 0);
            } else if (what == Renderer::kWhatAudioOffloadTearDown) {
                // Offload path is being torn down: close the sink, drop the
                // pass-through decoder, flush, then seek back to the current
                // position and re-create a software audio decoder.
                ALOGV("Tear down audio offload, fall back to s/w path");
                int64_t positionUs;
                CHECK(msg->findInt64("positionUs", &positionUs));
                mAudioSink->close();
                mAudioDecoder.clear();
                mRenderer->flush(true /* audio */);
                if (mVideoDecoder != NULL) {
                    mRenderer->flush(false /* audio */);
                }
                mRenderer->signalDisableOffloadAudio();
                mOffloadAudio = false;

                performSeek(positionUs);
                instantiateDecoder(true /* audio */, &mAudioDecoder);
            }
            break;
        }

        case kWhatMoreDataQueued:
        {
            break;
        }

        case kWhatReset:
        {
            ALOGV("kWhatReset");

            // Shut both decoders down, then perform the reset -- all
            // deferred so in-flight flushes can complete first.
            mDeferredActions.push_back(
                    new ShutdownDecoderAction(
                        true /* audio */, true /* video */));

            mDeferredActions.push_back(
                    new
SimpleAction(&NuPlayer::performReset));

            processDeferredActions();
            break;
        }

        case kWhatSeek:
        {
            int64_t seekTimeUs;
            CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));

            ALOGV("kWhatSeek seekTimeUs=%lld us", seekTimeUs);

            // Flush the decoders first, then seek -- both deferred.
            mDeferredActions.push_back(
                    new SimpleAction(&NuPlayer::performDecoderFlush));

            mDeferredActions.push_back(new SeekAction(seekTimeUs));

            processDeferredActions();
            break;
        }

        case kWhatPause:
        {
            CHECK(mRenderer != NULL);
            mSource->pause();
            mRenderer->pause();
            break;
        }

        case kWhatResume:
        {
            CHECK(mRenderer != NULL);
            mSource->resume();
            mRenderer->resume();
            break;
        }

        case kWhatSourceNotify:
        {
            onSourceNotify(msg);
            break;
        }

        case kWhatClosedCaptionNotify:
        {
            onClosedCaptionNotify(msg);
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

// Called whenever a flush or shutdown step completes; once BOTH streams are
// in FLUSHED or SHUT_DOWN it resumes the decoders, clears the flush state
// and runs any deferred actions.
void NuPlayer::finishFlushIfPossible() {
    if (mFlushingAudio != FLUSHED && mFlushingAudio != SHUT_DOWN) {
        return;
    }

    if (mFlushingVideo != FLUSHED && mFlushingVideo != SHUT_DOWN) {
        return;
    }

    ALOGV("both audio and video are flushed now.");

    if (mTimeDiscontinuityPending) {
        mRenderer->signalTimeDiscontinuity();
        mTimeDiscontinuityPending = false;
    }

    if (mAudioDecoder != NULL) {
        mAudioDecoder->signalResume();
    }

    if (mVideoDecoder != NULL) {
        mVideoDecoder->signalResume();
    }

    mFlushingAudio = NONE;
    mFlushingVideo = NONE;

    processDeferredActions();
}

// Posts a kWhatScanSources message (tagged with the current generation)
// unless one is already pending.
void NuPlayer::postScanSources() {
    if (mScanSourcesPending) {
        return;
    }

    sp<AMessage> msg = new AMessage(kWhatScanSources, id());
    msg->setInt32("generation", mScanSourcesGeneration);
    msg->post();

    mScanSourcesPending = true;
}

// Creates the audio or video decoder for the source's current format.
// Returns OK if the decoder already exists or was created; -EWOULDBLOCK
// if the source has no format for this stream yet.
status_t
NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
    if (*decoder != NULL) {
        return OK;
    }

    sp<AMessage> format = mSource->getFormat(audio);

    if (format == NULL) {
        return -EWOULDBLOCK;
    }

    if (!audio) {
        AString mime;
        CHECK(format->findString("mime", &mime));
        mVideoIsAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str());

        // A fresh closed-caption decoder accompanies every video decoder.
        sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, id());
        mCCDecoder = new CCDecoder(ccNotify);

        if (mSourceFlags & Source::FLAG_SECURE) {
            format->setInt32("secure", true);
        }
    }

    sp<AMessage> notify =
        new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
                     id());

    if (audio) {
        // Offloaded audio bypasses software decoding entirely.
        if (mOffloadAudio) {
            *decoder = new DecoderPassThrough(notify);
        } else {
            *decoder = new Decoder(notify);
        }
    } else {
        *decoder = new Decoder(notify, mNativeWindow);
    }
    (*decoder)->init();
    (*decoder)->configure(format);

    // allocate buffers to decrypt widevine source buffers
    if (!audio && (mSourceFlags & Source::FLAG_SECURE)) {
        Vector<sp<ABuffer> > inputBufs;
        CHECK_EQ((*decoder)->getInputBuffers(&inputBufs), (status_t)OK);

        // Wrap the decoder's input buffers as MediaBuffers so the secure
        // source can decrypt directly into them.
        Vector<MediaBuffer *> mediaBufs;
        for (size_t i = 0; i < inputBufs.size(); i++) {
            const sp<ABuffer> &buffer = inputBufs[i];
            MediaBuffer *mbuf = new MediaBuffer(buffer->data(), buffer->size());
            mediaBufs.push(mbuf);
        }

        status_t err = mSource->setBuffers(audio, mediaBufs);
        if (err != OK) {
            for (size_t i = 0; i < mediaBufs.size(); ++i) {
                mediaBufs[i]->release();
            }
            mediaBufs.clear();
            ALOGE("Secure source didn't support secure mediaBufs.");
            return err;
        }
    }
    return OK;
}

// Supplies one access unit to the requesting decoder (via the "reply"
// message). Returns -EWOULDBLOCK when no input is available yet; handles
// format/time discontinuities by initiating flushes.
status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    if
((audio && IsFlushingState(mFlushingAudio))
            || (!audio && IsFlushingState(mFlushingVideo))) {
        // While flushing, answer the fill request with a discontinuity
        // instead of real data.
        reply->setInt32("err", INFO_DISCONTINUITY);
        reply->post();
        return OK;
    }

    sp<ABuffer> accessUnit;

    // Loop until we have an access unit we intend to deliver (late,
    // non-reference AVC frames may be dropped -- see below).
    bool dropAccessUnit;
    do {
        status_t err = mSource->dequeueAccessUnit(audio, &accessUnit);

        if (err == -EWOULDBLOCK) {
            return err;
        } else if (err != OK) {
            if (err == INFO_DISCONTINUITY) {
                int32_t type;
                CHECK(accessUnit->meta()->findInt32("discontinuity", &type));

                bool formatChange =
                    (audio &&
                     (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT))
                    || (!audio &&
                     (type & ATSParser::DISCONTINUITY_VIDEO_FORMAT));

                bool timeChange = (type & ATSParser::DISCONTINUITY_TIME) != 0;

                ALOGI("%s discontinuity (formatChange=%d, time=%d)",
                     audio ? "audio" : "video", formatChange, timeChange);

                if (audio) {
                    mSkipRenderingAudioUntilMediaTimeUs = -1;
                } else {
                    mSkipRenderingVideoUntilMediaTimeUs = -1;
                }

                if (timeChange) {
                    // The source may ask us to suppress rendering until a
                    // given media time after the discontinuity.
                    sp<AMessage> extra;
                    if (accessUnit->meta()->findMessage("extra", &extra)
                            && extra != NULL) {
                        int64_t resumeAtMediaTimeUs;
                        if (extra->findInt64(
                                    "resume-at-mediatimeUs",
                                    &resumeAtMediaTimeUs)) {
                            ALOGI("suppressing rendering of %s until %lld us",
                                    audio ? "audio" : "video",
                                    resumeAtMediaTimeUs);

                            if (audio) {
                                mSkipRenderingAudioUntilMediaTimeUs =
                                    resumeAtMediaTimeUs;
                            } else {
                                mSkipRenderingVideoUntilMediaTimeUs =
                                    resumeAtMediaTimeUs;
                            }
                        }
                    }
                }

                mTimeDiscontinuityPending =
                    mTimeDiscontinuityPending || timeChange;

                if (mFlushingAudio == NONE && mFlushingVideo == NONE) {
                    // And we'll resume scanning sources once we're done
                    // flushing.
1236 mDeferredActions.push_front( 1237 new SimpleAction( 1238 &NuPlayer::performScanSources)); 1239 } 1240 1241 if (formatChange || timeChange) { 1242 1243 sp<AMessage> newFormat = mSource->getFormat(audio); 1244 sp<Decoder> &decoder = audio ? mAudioDecoder : mVideoDecoder; 1245 if (formatChange && !decoder->supportsSeamlessFormatChange(newFormat)) { 1246 flushDecoder(audio, /* needShutdown = */ true); 1247 } else { 1248 flushDecoder(audio, /* needShutdown = */ false); 1249 err = OK; 1250 } 1251 } else { 1252 // This stream is unaffected by the discontinuity 1253 1254 if (audio) { 1255 mFlushingAudio = FLUSHED; 1256 } else { 1257 mFlushingVideo = FLUSHED; 1258 } 1259 1260 finishFlushIfPossible(); 1261 1262 return -EWOULDBLOCK; 1263 } 1264 } 1265 1266 reply->setInt32("err", err); 1267 reply->post(); 1268 return OK; 1269 } 1270 1271 if (!audio) { 1272 ++mNumFramesTotal; 1273 } 1274 1275 dropAccessUnit = false; 1276 if (!audio 1277 && !(mSourceFlags & Source::FLAG_SECURE) 1278 && mVideoLateByUs > 100000ll 1279 && mVideoIsAVC 1280 && !IsAVCReferenceFrame(accessUnit)) { 1281 dropAccessUnit = true; 1282 ++mNumFramesDropped; 1283 } 1284 } while (dropAccessUnit); 1285 1286 // ALOGV("returned a valid buffer of %s data", audio ? "audio" : "video"); 1287 1288#if 0 1289 int64_t mediaTimeUs; 1290 CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs)); 1291 ALOGV("feeding %s input buffer at media time %.2f secs", 1292 audio ? "audio" : "video", 1293 mediaTimeUs / 1E6); 1294#endif 1295 1296 if (!audio) { 1297 mCCDecoder->decode(accessUnit); 1298 } 1299 1300 reply->setBuffer("buffer", accessUnit); 1301 reply->post(); 1302 1303 return OK; 1304} 1305 1306void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) { 1307 // ALOGV("renderBuffer %s", audio ? "audio" : "video"); 1308 1309 sp<AMessage> reply; 1310 CHECK(msg->findMessage("reply", &reply)); 1311 1312 if (IsFlushingState(audio ? 
mFlushingAudio : mFlushingVideo)) { 1313 // We're currently attempting to flush the decoder, in order 1314 // to complete this, the decoder wants all its buffers back, 1315 // so we don't want any output buffers it sent us (from before 1316 // we initiated the flush) to be stuck in the renderer's queue. 1317 1318 ALOGV("we're still flushing the %s decoder, sending its output buffer" 1319 " right back.", audio ? "audio" : "video"); 1320 1321 reply->post(); 1322 return; 1323 } 1324 1325 sp<ABuffer> buffer; 1326 CHECK(msg->findBuffer("buffer", &buffer)); 1327 1328 int64_t mediaTimeUs; 1329 CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs)); 1330 1331 int64_t &skipUntilMediaTimeUs = 1332 audio 1333 ? mSkipRenderingAudioUntilMediaTimeUs 1334 : mSkipRenderingVideoUntilMediaTimeUs; 1335 1336 if (skipUntilMediaTimeUs >= 0) { 1337 1338 if (mediaTimeUs < skipUntilMediaTimeUs) { 1339 ALOGV("dropping %s buffer at time %lld as requested.", 1340 audio ? "audio" : "video", 1341 mediaTimeUs); 1342 1343 reply->post(); 1344 return; 1345 } 1346 1347 skipUntilMediaTimeUs = -1; 1348 } 1349 1350 if (!audio && mCCDecoder->isSelected()) { 1351 mCCDecoder->display(mediaTimeUs); 1352 } 1353 1354 mRenderer->queueBuffer(audio, buffer, reply); 1355} 1356 1357void NuPlayer::notifyListener(int msg, int ext1, int ext2, const Parcel *in) { 1358 if (mDriver == NULL) { 1359 return; 1360 } 1361 1362 sp<NuPlayerDriver> driver = mDriver.promote(); 1363 1364 if (driver == NULL) { 1365 return; 1366 } 1367 1368 driver->notifyListener(msg, ext1, ext2, in); 1369} 1370 1371void NuPlayer::flushDecoder(bool audio, bool needShutdown) { 1372 ALOGV("[%s] flushDecoder needShutdown=%d", 1373 audio ? "audio" : "video", needShutdown); 1374 1375 if ((audio && mAudioDecoder == NULL) || (!audio && mVideoDecoder == NULL)) { 1376 ALOGI("flushDecoder %s without decoder present", 1377 audio ? "audio" : "video"); 1378 } 1379 1380 // Make sure we don't continue to scan sources until we finish flushing. 
1381 ++mScanSourcesGeneration; 1382 mScanSourcesPending = false; 1383 1384 (audio ? mAudioDecoder : mVideoDecoder)->signalFlush(); 1385 mRenderer->flush(audio); 1386 1387 FlushStatus newStatus = 1388 needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER; 1389 1390 if (audio) { 1391 CHECK(mFlushingAudio == NONE 1392 || mFlushingAudio == AWAITING_DISCONTINUITY); 1393 1394 mFlushingAudio = newStatus; 1395 1396 if (mFlushingVideo == NONE) { 1397 mFlushingVideo = (mVideoDecoder != NULL) 1398 ? AWAITING_DISCONTINUITY 1399 : FLUSHED; 1400 } 1401 } else { 1402 CHECK(mFlushingVideo == NONE 1403 || mFlushingVideo == AWAITING_DISCONTINUITY); 1404 1405 mFlushingVideo = newStatus; 1406 1407 if (mFlushingAudio == NONE) { 1408 mFlushingAudio = (mAudioDecoder != NULL) 1409 ? AWAITING_DISCONTINUITY 1410 : FLUSHED; 1411 } 1412 } 1413} 1414 1415sp<AMessage> NuPlayer::Source::getFormat(bool audio) { 1416 sp<MetaData> meta = getFormatMeta(audio); 1417 1418 if (meta == NULL) { 1419 return NULL; 1420 } 1421 1422 sp<AMessage> msg = new AMessage; 1423 1424 if(convertMetaDataToMessage(meta, &msg) == OK) { 1425 return msg; 1426 } 1427 return NULL; 1428} 1429 1430status_t NuPlayer::setVideoScalingMode(int32_t mode) { 1431 mVideoScalingMode = mode; 1432 if (mNativeWindow != NULL) { 1433 status_t ret = native_window_set_scaling_mode( 1434 mNativeWindow->getNativeWindow().get(), mVideoScalingMode); 1435 if (ret != OK) { 1436 ALOGE("Failed to set scaling mode (%d): %s", 1437 -ret, strerror(-ret)); 1438 return ret; 1439 } 1440 } 1441 return OK; 1442} 1443 1444status_t NuPlayer::getTrackInfo(Parcel* reply) const { 1445 sp<AMessage> msg = new AMessage(kWhatGetTrackInfo, id()); 1446 msg->setPointer("reply", reply); 1447 1448 sp<AMessage> response; 1449 status_t err = msg->postAndAwaitResponse(&response); 1450 return err; 1451} 1452 1453status_t NuPlayer::selectTrack(size_t trackIndex, bool select) { 1454 sp<AMessage> msg = new AMessage(kWhatSelectTrack, id()); 1455 msg->setSize("trackIndex", 
trackIndex); 1456 msg->setInt32("select", select); 1457 1458 sp<AMessage> response; 1459 status_t err = msg->postAndAwaitResponse(&response); 1460 1461 if (err != OK) { 1462 return err; 1463 } 1464 1465 if (!response->findInt32("err", &err)) { 1466 err = OK; 1467 } 1468 1469 return err; 1470} 1471 1472void NuPlayer::schedulePollDuration() { 1473 sp<AMessage> msg = new AMessage(kWhatPollDuration, id()); 1474 msg->setInt32("generation", mPollDurationGeneration); 1475 msg->post(); 1476} 1477 1478void NuPlayer::cancelPollDuration() { 1479 ++mPollDurationGeneration; 1480} 1481 1482void NuPlayer::processDeferredActions() { 1483 while (!mDeferredActions.empty()) { 1484 // We won't execute any deferred actions until we're no longer in 1485 // an intermediate state, i.e. one more more decoders are currently 1486 // flushing or shutting down. 1487 1488 if (mRenderer != NULL) { 1489 // There's an edge case where the renderer owns all output 1490 // buffers and is paused, therefore the decoder will not read 1491 // more input data and will never encounter the matching 1492 // discontinuity. To avoid this, we resume the renderer. 1493 1494 if (mFlushingAudio == AWAITING_DISCONTINUITY 1495 || mFlushingVideo == AWAITING_DISCONTINUITY) { 1496 mRenderer->resume(); 1497 } 1498 } 1499 1500 if (mFlushingAudio != NONE || mFlushingVideo != NONE) { 1501 // We're currently flushing, postpone the reset until that's 1502 // completed. 
1503 1504 ALOGV("postponing action mFlushingAudio=%d, mFlushingVideo=%d", 1505 mFlushingAudio, mFlushingVideo); 1506 1507 break; 1508 } 1509 1510 sp<Action> action = *mDeferredActions.begin(); 1511 mDeferredActions.erase(mDeferredActions.begin()); 1512 1513 action->execute(this); 1514 } 1515} 1516 1517void NuPlayer::performSeek(int64_t seekTimeUs) { 1518 ALOGV("performSeek seekTimeUs=%lld us (%.2f secs)", 1519 seekTimeUs, 1520 seekTimeUs / 1E6); 1521 1522 mSource->seekTo(seekTimeUs); 1523 ++mTimedTextGeneration; 1524 1525 if (mDriver != NULL) { 1526 sp<NuPlayerDriver> driver = mDriver.promote(); 1527 if (driver != NULL) { 1528 driver->notifyPosition(seekTimeUs); 1529 driver->notifySeekComplete(); 1530 } 1531 } 1532 1533 // everything's flushed, continue playback. 1534} 1535 1536void NuPlayer::performDecoderFlush() { 1537 ALOGV("performDecoderFlush"); 1538 1539 if (mAudioDecoder == NULL && mVideoDecoder == NULL) { 1540 return; 1541 } 1542 1543 mTimeDiscontinuityPending = true; 1544 1545 if (mAudioDecoder != NULL) { 1546 flushDecoder(true /* audio */, false /* needShutdown */); 1547 } 1548 1549 if (mVideoDecoder != NULL) { 1550 flushDecoder(false /* audio */, false /* needShutdown */); 1551 } 1552} 1553 1554void NuPlayer::performDecoderShutdown(bool audio, bool video) { 1555 ALOGV("performDecoderShutdown audio=%d, video=%d", audio, video); 1556 1557 if ((!audio || mAudioDecoder == NULL) 1558 && (!video || mVideoDecoder == NULL)) { 1559 return; 1560 } 1561 1562 mTimeDiscontinuityPending = true; 1563 1564 if (mFlushingAudio == NONE && (!audio || mAudioDecoder == NULL)) { 1565 mFlushingAudio = FLUSHED; 1566 } 1567 1568 if (mFlushingVideo == NONE && (!video || mVideoDecoder == NULL)) { 1569 mFlushingVideo = FLUSHED; 1570 } 1571 1572 if (audio && mAudioDecoder != NULL) { 1573 flushDecoder(true /* audio */, true /* needShutdown */); 1574 } 1575 1576 if (video && mVideoDecoder != NULL) { 1577 flushDecoder(false /* audio */, true /* needShutdown */); 1578 } 1579} 1580 
1581void NuPlayer::performReset() { 1582 ALOGV("performReset"); 1583 1584 CHECK(mAudioDecoder == NULL); 1585 CHECK(mVideoDecoder == NULL); 1586 1587 cancelPollDuration(); 1588 1589 ++mScanSourcesGeneration; 1590 mScanSourcesPending = false; 1591 1592 if (mRendererLooper != NULL) { 1593 if (mRenderer != NULL) { 1594 mRendererLooper->unregisterHandler(mRenderer->id()); 1595 } 1596 mRendererLooper->stop(); 1597 mRendererLooper.clear(); 1598 } 1599 mRenderer.clear(); 1600 1601 if (mSource != NULL) { 1602 mSource->stop(); 1603 1604 looper()->unregisterHandler(mSource->id()); 1605 1606 mSource.clear(); 1607 } 1608 1609 if (mDriver != NULL) { 1610 sp<NuPlayerDriver> driver = mDriver.promote(); 1611 if (driver != NULL) { 1612 driver->notifyResetComplete(); 1613 } 1614 } 1615 1616 mStarted = false; 1617} 1618 1619void NuPlayer::performScanSources() { 1620 ALOGV("performScanSources"); 1621 1622 if (!mStarted) { 1623 return; 1624 } 1625 1626 if (mAudioDecoder == NULL || mVideoDecoder == NULL) { 1627 postScanSources(); 1628 } 1629} 1630 1631void NuPlayer::performSetSurface(const sp<NativeWindowWrapper> &wrapper) { 1632 ALOGV("performSetSurface"); 1633 1634 mNativeWindow = wrapper; 1635 1636 // XXX - ignore error from setVideoScalingMode for now 1637 setVideoScalingMode(mVideoScalingMode); 1638 1639 if (mDriver != NULL) { 1640 sp<NuPlayerDriver> driver = mDriver.promote(); 1641 if (driver != NULL) { 1642 driver->notifySetSurfaceComplete(); 1643 } 1644 } 1645} 1646 1647void NuPlayer::onSourceNotify(const sp<AMessage> &msg) { 1648 int32_t what; 1649 CHECK(msg->findInt32("what", &what)); 1650 1651 switch (what) { 1652 case Source::kWhatPrepared: 1653 { 1654 if (mSource == NULL) { 1655 // This is a stale notification from a source that was 1656 // asynchronously preparing when the client called reset(). 1657 // We handled the reset, the source is gone. 
1658 break; 1659 } 1660 1661 int32_t err; 1662 CHECK(msg->findInt32("err", &err)); 1663 1664 sp<NuPlayerDriver> driver = mDriver.promote(); 1665 if (driver != NULL) { 1666 // notify duration first, so that it's definitely set when 1667 // the app received the "prepare complete" callback. 1668 int64_t durationUs; 1669 if (mSource->getDuration(&durationUs) == OK) { 1670 driver->notifyDuration(durationUs); 1671 } 1672 driver->notifyPrepareCompleted(err); 1673 } 1674 1675 break; 1676 } 1677 1678 case Source::kWhatFlagsChanged: 1679 { 1680 uint32_t flags; 1681 CHECK(msg->findInt32("flags", (int32_t *)&flags)); 1682 1683 sp<NuPlayerDriver> driver = mDriver.promote(); 1684 if (driver != NULL) { 1685 driver->notifyFlagsChanged(flags); 1686 } 1687 1688 if ((mSourceFlags & Source::FLAG_DYNAMIC_DURATION) 1689 && (!(flags & Source::FLAG_DYNAMIC_DURATION))) { 1690 cancelPollDuration(); 1691 } else if (!(mSourceFlags & Source::FLAG_DYNAMIC_DURATION) 1692 && (flags & Source::FLAG_DYNAMIC_DURATION) 1693 && (mAudioDecoder != NULL || mVideoDecoder != NULL)) { 1694 schedulePollDuration(); 1695 } 1696 1697 mSourceFlags = flags; 1698 break; 1699 } 1700 1701 case Source::kWhatVideoSizeChanged: 1702 { 1703 int32_t width, height; 1704 CHECK(msg->findInt32("width", &width)); 1705 CHECK(msg->findInt32("height", &height)); 1706 1707 notifyListener(MEDIA_SET_VIDEO_SIZE, width, height); 1708 break; 1709 } 1710 1711 case Source::kWhatBufferingStart: 1712 { 1713 notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_START, 0); 1714 break; 1715 } 1716 1717 case Source::kWhatBufferingEnd: 1718 { 1719 notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_END, 0); 1720 break; 1721 } 1722 1723 case Source::kWhatSubtitleData: 1724 { 1725 sp<ABuffer> buffer; 1726 CHECK(msg->findBuffer("buffer", &buffer)); 1727 1728 sendSubtitleData(buffer, 0 /* baseIndex */); 1729 break; 1730 } 1731 1732 case Source::kWhatTimedTextData: 1733 { 1734 int32_t generation; 1735 if (msg->findInt32("generation", &generation) 1736 && 
generation != mTimedTextGeneration) { 1737 break; 1738 } 1739 1740 sp<ABuffer> buffer; 1741 CHECK(msg->findBuffer("buffer", &buffer)); 1742 1743 sp<NuPlayerDriver> driver = mDriver.promote(); 1744 if (driver == NULL) { 1745 break; 1746 } 1747 1748 int posMs; 1749 int64_t timeUs, posUs; 1750 driver->getCurrentPosition(&posMs); 1751 posUs = posMs * 1000; 1752 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 1753 1754 if (posUs < timeUs) { 1755 if (!msg->findInt32("generation", &generation)) { 1756 msg->setInt32("generation", mTimedTextGeneration); 1757 } 1758 msg->post(timeUs - posUs); 1759 } else { 1760 sendTimedTextData(buffer); 1761 } 1762 break; 1763 } 1764 1765 case Source::kWhatQueueDecoderShutdown: 1766 { 1767 int32_t audio, video; 1768 CHECK(msg->findInt32("audio", &audio)); 1769 CHECK(msg->findInt32("video", &video)); 1770 1771 sp<AMessage> reply; 1772 CHECK(msg->findMessage("reply", &reply)); 1773 1774 queueDecoderShutdown(audio, video, reply); 1775 break; 1776 } 1777 1778 default: 1779 TRESPASS(); 1780 } 1781} 1782 1783void NuPlayer::onClosedCaptionNotify(const sp<AMessage> &msg) { 1784 int32_t what; 1785 CHECK(msg->findInt32("what", &what)); 1786 1787 switch (what) { 1788 case NuPlayer::CCDecoder::kWhatClosedCaptionData: 1789 { 1790 sp<ABuffer> buffer; 1791 CHECK(msg->findBuffer("buffer", &buffer)); 1792 1793 size_t inbandTracks = 0; 1794 if (mSource != NULL) { 1795 inbandTracks = mSource->getTrackCount(); 1796 } 1797 1798 sendSubtitleData(buffer, inbandTracks); 1799 break; 1800 } 1801 1802 case NuPlayer::CCDecoder::kWhatTrackAdded: 1803 { 1804 notifyListener(MEDIA_INFO, MEDIA_INFO_METADATA_UPDATE, 0); 1805 1806 break; 1807 } 1808 1809 default: 1810 TRESPASS(); 1811 } 1812 1813 1814} 1815 1816void NuPlayer::sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex) { 1817 int32_t trackIndex; 1818 int64_t timeUs, durationUs; 1819 CHECK(buffer->meta()->findInt32("trackIndex", &trackIndex)); 1820 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 
1821 CHECK(buffer->meta()->findInt64("durationUs", &durationUs)); 1822 1823 Parcel in; 1824 in.writeInt32(trackIndex + baseIndex); 1825 in.writeInt64(timeUs); 1826 in.writeInt64(durationUs); 1827 in.writeInt32(buffer->size()); 1828 in.writeInt32(buffer->size()); 1829 in.write(buffer->data(), buffer->size()); 1830 1831 notifyListener(MEDIA_SUBTITLE_DATA, 0, 0, &in); 1832} 1833 1834void NuPlayer::sendTimedTextData(const sp<ABuffer> &buffer) { 1835 const void *data; 1836 size_t size = 0; 1837 int64_t timeUs; 1838 int32_t flag = TextDescriptions::LOCAL_DESCRIPTIONS; 1839 1840 AString mime; 1841 CHECK(buffer->meta()->findString("mime", &mime)); 1842 CHECK(strcasecmp(mime.c_str(), MEDIA_MIMETYPE_TEXT_3GPP) == 0); 1843 1844 data = buffer->data(); 1845 size = buffer->size(); 1846 1847 Parcel parcel; 1848 if (size > 0) { 1849 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 1850 flag |= TextDescriptions::IN_BAND_TEXT_3GPP; 1851 TextDescriptions::getParcelOfDescriptions( 1852 (const uint8_t *)data, size, flag, timeUs / 1000, &parcel); 1853 } 1854 1855 if ((parcel.dataSize() > 0)) { 1856 notifyListener(MEDIA_TIMED_TEXT, 0, 0, &parcel); 1857 } else { // send an empty timed text 1858 notifyListener(MEDIA_TIMED_TEXT, 0, 0); 1859 } 1860} 1861//////////////////////////////////////////////////////////////////////////////// 1862 1863void NuPlayer::Source::notifyFlagsChanged(uint32_t flags) { 1864 sp<AMessage> notify = dupNotify(); 1865 notify->setInt32("what", kWhatFlagsChanged); 1866 notify->setInt32("flags", flags); 1867 notify->post(); 1868} 1869 1870void NuPlayer::Source::notifyVideoSizeChanged(int32_t width, int32_t height) { 1871 sp<AMessage> notify = dupNotify(); 1872 notify->setInt32("what", kWhatVideoSizeChanged); 1873 notify->setInt32("width", width); 1874 notify->setInt32("height", height); 1875 notify->post(); 1876} 1877 1878void NuPlayer::Source::notifyPrepared(status_t err) { 1879 sp<AMessage> notify = dupNotify(); 1880 notify->setInt32("what", kWhatPrepared); 1881 
notify->setInt32("err", err); 1882 notify->post(); 1883} 1884 1885void NuPlayer::Source::onMessageReceived(const sp<AMessage> & /* msg */) { 1886 TRESPASS(); 1887} 1888 1889void NuPlayer::queueDecoderShutdown( 1890 bool audio, bool video, const sp<AMessage> &reply) { 1891 ALOGI("queueDecoderShutdown audio=%d, video=%d", audio, video); 1892 1893 mDeferredActions.push_back( 1894 new ShutdownDecoderAction(audio, video)); 1895 1896 mDeferredActions.push_back( 1897 new SimpleAction(&NuPlayer::performScanSources)); 1898 1899 mDeferredActions.push_back(new PostMessageAction(reply)); 1900 1901 processDeferredActions(); 1902} 1903 1904} // namespace android 1905