NuPlayer.cpp revision e46711343b43786c049a007369a72c1c78e6c5db
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayer"
#include <utils/Log.h>

#include "NuPlayer.h"

#include "HTTPLiveSource.h"
#include "NuPlayerDecoder.h"
#include "NuPlayerDecoderPassThrough.h"
#include "NuPlayerDriver.h"
#include "NuPlayerRenderer.h"
#include "NuPlayerSource.h"
#include "RTSPSource.h"
#include "StreamingSource.h"
#include "GenericSource.h"
#include "TextDescriptions.h"

#include "ATSParser.h"

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaBuffer.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <gui/IGraphicBufferProducer.h>

#include "avc_utils.h"

#include "ESDS.h"
#include <media/stagefright/Utils.h>

namespace android {

// Base class for a deferred operation. Instances are queued on
// NuPlayer::mDeferredActions and executed later (in order) by
// processDeferredActions() once the player reaches a safe state
// (e.g. after decoder flush/shutdown completes).
struct NuPlayer::Action : public RefBase {
    Action() {}

    // Performs the deferred operation on the player. Runs on the
    // player's looper thread.
    virtual void execute(NuPlayer *player) = 0;

private:
    DISALLOW_EVIL_CONSTRUCTORS(Action);
};

// Deferred seek: captures the target position and invokes
// NuPlayer::performSeek() when executed.
struct NuPlayer::SeekAction : public Action {
    SeekAction(int64_t seekTimeUs)
        : mSeekTimeUs(seekTimeUs) {
    }

    virtual void execute(NuPlayer *player) {
        player->performSeek(mSeekTimeUs);
    }

private:
    int64_t mSeekTimeUs;  // absolute media time to seek to, in microseconds

    DISALLOW_EVIL_CONSTRUCTORS(SeekAction);
};

// Deferred surface change: holds the new native window (may wrap NULL)
// and applies it via NuPlayer::performSetSurface() when executed.
struct NuPlayer::SetSurfaceAction : public Action {
    SetSurfaceAction(const sp<NativeWindowWrapper> &wrapper)
        : mWrapper(wrapper) {
    }

    virtual void execute(NuPlayer *player) {
        player->performSetSurface(mWrapper);
    }

private:
    sp<NativeWindowWrapper> mWrapper;

    DISALLOW_EVIL_CONSTRUCTORS(SetSurfaceAction);
};

// Deferred decoder teardown: shuts down the audio and/or video decoder
// via NuPlayer::performDecoderShutdown(). Queued e.g. before a surface
// change (video only) or a reset (both).
struct NuPlayer::ShutdownDecoderAction : public Action {
    ShutdownDecoderAction(bool audio, bool video)
        : mAudio(audio),
          mVideo(video) {
    }

    virtual void execute(NuPlayer *player) {
        player->performDecoderShutdown(mAudio, mVideo);
    }

private:
    bool mAudio;  // shut down the audio decoder
    bool mVideo;  // shut down the video decoder

    DISALLOW_EVIL_CONSTRUCTORS(ShutdownDecoderAction);
};

// Deferred message delivery: simply (re)posts a captured AMessage when
// the action queue drains to this point.
struct NuPlayer::PostMessageAction : public Action {
    PostMessageAction(const sp<AMessage> &msg)
        : mMessage(msg) {
    }

    virtual void execute(NuPlayer *) {
        mMessage->post();
    }

private:
    sp<AMessage> mMessage;

    DISALLOW_EVIL_CONSTRUCTORS(PostMessageAction);
};

// Use this if there's no state necessary to save in order to execute
// the action.
// Deferred call of a no-argument NuPlayer member function (e.g.
// &NuPlayer::performScanSources, &NuPlayer::performReset).
struct NuPlayer::SimpleAction : public Action {
    typedef void (NuPlayer::*ActionFunc)();

    SimpleAction(ActionFunc func)
        : mFunc(func) {
    }

    virtual void execute(NuPlayer *player) {
        (player->*mFunc)();
    }

private:
    ActionFunc mFunc;  // member function to invoke on execute()

    DISALLOW_EVIL_CONSTRUCTORS(SimpleAction);
};

////////////////////////////////////////////////////////////////////////////////

// All state starts cleared; real initialization happens when the data
// source is set and kWhatStart is handled.
NuPlayer::NuPlayer()
    : mUIDValid(false),
      mSourceFlags(0),
      mVideoIsAVC(false),
      mOffloadAudio(false),
      mAudioEOS(false),
      mVideoEOS(false),
      mScanSourcesPending(false),
      mScanSourcesGeneration(0),
      mPollDurationGeneration(0),
      mTimedTextGeneration(0),
      mTimeDiscontinuityPending(false),
      mFlushingAudio(NONE),
      mFlushingVideo(NONE),
      mSkipRenderingAudioUntilMediaTimeUs(-1ll),
      mSkipRenderingVideoUntilMediaTimeUs(-1ll),
      mVideoLateByUs(0ll),
      mNumFramesTotal(0ll),
      mNumFramesDropped(0ll),
      mVideoScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW),
      mStarted(false) {
}

NuPlayer::~NuPlayer() {
}

// Records the client's uid so network sources (RTSP/Generic) can be
// attributed to the calling app.
void NuPlayer::setUID(uid_t uid) {
    mUIDValid = true;
    mUID = uid;
}

void NuPlayer::setDriver(const wp<NuPlayerDriver> &driver) {
    mDriver = driver;
}

// Asynchronously adopts a push-style IStreamSource; the StreamingSource
// wrapper is delivered to the looper via kWhatSetDataSource.
void NuPlayer::setDataSourceAsync(const sp<IStreamSource> &source) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    sp<AMessage> notify = new AMessage(kWhatSourceNotify, id());

    msg->setObject("source", new StreamingSource(notify, source));
    msg->post();
}

// Heuristic: treat http(s)/file URLs ending in ".m3u8" — or containing
// "m3u8" anywhere — as HTTP live (HLS) streams.
static bool IsHTTPLiveURL(const char *url) {
    if (!strncasecmp("http://", url, 7)
            || !strncasecmp("https://", url, 8)
            || !strncasecmp("file://", url, 7)) {
        size_t len = strlen(url);
        if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
            return true;
        }

        // NOTE(review): this substring fallback classifies ANY matching-scheme
        // URL containing "m3u8" anywhere (path, query, host) as HLS — confirm
        // this deliberately broad match is intended.
        if (strstr(url,"m3u8")) {
            return true;
        }
    }

    return false;
}

// Asynchronously selects a Source implementation from the URL
// (HLS, RTSP, http(s) SDP, widevine, or generic) and posts it via
// kWhatSetDataSource. (Signature continues on the next line.)
void
NuPlayer::setDataSourceAsync(
        const sp<IMediaHTTPService> &httpService,
        const char *url,
        const KeyedVector<String8, String8> *headers) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());
    size_t len = strlen(url);

    // Source implementations report events back through kWhatSourceNotify.
    sp<AMessage> notify = new AMessage(kWhatSourceNotify, id());

    sp<Source> source;
    if (IsHTTPLiveURL(url)) {
        source = new HTTPLiveSource(notify, httpService, url, headers);
    } else if (!strncasecmp(url, "rtsp://", 7)) {
        source = new RTSPSource(
                notify, httpService, url, headers, mUIDValid, mUID);
    } else if ((!strncasecmp(url, "http://", 7)
                || !strncasecmp(url, "https://", 8))
                    && ((len >= 4 && !strcasecmp(".sdp", &url[len - 4]))
                    || strstr(url, ".sdp?"))) {
        // http(s) URL pointing at an SDP description: RTSP in
        // "connected over HTTP" mode (trailing 'true' argument).
        source = new RTSPSource(
                notify, httpService, url, headers, mUIDValid, mUID, true);
    } else if ((!strncasecmp(url, "widevine://", 11))) {
        source = new GenericSource(notify, httpService, url, headers,
                true /* isWidevine */, mUIDValid, mUID);
        // Don't set FLAG_SECURE on mSourceFlags here, the correct flags
        // will be updated in Source::kWhatFlagsChanged handler when
        // GenericSource is prepared.
    } else {
        source = new GenericSource(notify, httpService, url, headers);
    }

    msg->setObject("source", source);
    msg->post();
}

// Asynchronously adopts an already-open file descriptor (offset/length
// delimit the usable region) through a GenericSource.
void NuPlayer::setDataSourceAsync(int fd, int64_t offset, int64_t length) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    sp<AMessage> notify = new AMessage(kWhatSourceNotify, id());

    sp<Source> source = new GenericSource(notify, fd, offset, length);
    msg->setObject("source", source);
    msg->post();
}

void NuPlayer::prepareAsync() {
    (new AMessage(kWhatPrepare, id()))->post();
}

// Hands the (possibly NULL) output surface to the looper thread; the
// kWhatSetVideoNativeWindow handler defers the actual switch until the
// video decoder has been shut down.
void NuPlayer::setVideoSurfaceTextureAsync(
        const sp<IGraphicBufferProducer> &bufferProducer) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id());

    if (bufferProducer == NULL) {
        // Explicit NULL object: clears the current surface.
        msg->setObject("native-window", NULL);
    } else {
        msg->setObject(
                "native-window",
                new NativeWindowWrapper(
                    new Surface(bufferProducer)));
    }

    msg->post();
}

void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) {
    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id());
    msg->setObject("sink", sink);
    msg->post();
}

void NuPlayer::start() {
    (new AMessage(kWhatStart, id()))->post();
}

void NuPlayer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

void NuPlayer::resetAsync() {
    (new AMessage(kWhatReset, id()))->post();
}

// Asynchronous seek; the actual work is deferred behind a decoder flush
// by the kWhatSeek handler.
void NuPlayer::seekToAsync(int64_t seekTimeUs) {
    sp<AMessage> msg = new AMessage(kWhatSeek, id());
    msg->setInt64("seekTimeUs", seekTimeUs);
    msg->post();
}

// static
// Returns true while 'state' denotes an in-progress flush. When non-NULL,
// *needShutdown is set to whether the decoder must also be shut down once
// the flush completes (true only for FLUSHING_DECODER_SHUTDOWN).
bool NuPlayer::IsFlushingState(FlushStatus state, bool *needShutdown) {
    switch (state) {
        case FLUSHING_DECODER:
            if (needShutdown != NULL) {
                *needShutdown = false;
            }
            return true;

        case FLUSHING_DECODER_SHUTDOWN:
            if (needShutdown
!= NULL) { 310 *needShutdown = true; 311 } 312 return true; 313 314 default: 315 return false; 316 } 317} 318 319void NuPlayer::writeTrackInfo( 320 Parcel* reply, const sp<AMessage> format) const { 321 int32_t trackType; 322 CHECK(format->findInt32("type", &trackType)); 323 324 AString lang; 325 CHECK(format->findString("language", &lang)); 326 327 reply->writeInt32(2); // write something non-zero 328 reply->writeInt32(trackType); 329 reply->writeString16(String16(lang.c_str())); 330 331 if (trackType == MEDIA_TRACK_TYPE_SUBTITLE) { 332 AString mime; 333 CHECK(format->findString("mime", &mime)); 334 335 int32_t isAuto, isDefault, isForced; 336 CHECK(format->findInt32("auto", &isAuto)); 337 CHECK(format->findInt32("default", &isDefault)); 338 CHECK(format->findInt32("forced", &isForced)); 339 340 reply->writeString16(String16(mime.c_str())); 341 reply->writeInt32(isAuto); 342 reply->writeInt32(isDefault); 343 reply->writeInt32(isForced); 344 } 345} 346 347void NuPlayer::onMessageReceived(const sp<AMessage> &msg) { 348 switch (msg->what()) { 349 case kWhatSetDataSource: 350 { 351 ALOGV("kWhatSetDataSource"); 352 353 CHECK(mSource == NULL); 354 355 sp<RefBase> obj; 356 CHECK(msg->findObject("source", &obj)); 357 358 mSource = static_cast<Source *>(obj.get()); 359 360 looper()->registerHandler(mSource); 361 362 CHECK(mDriver != NULL); 363 sp<NuPlayerDriver> driver = mDriver.promote(); 364 if (driver != NULL) { 365 driver->notifySetDataSourceCompleted(OK); 366 } 367 break; 368 } 369 370 case kWhatPrepare: 371 { 372 mSource->prepareAsync(); 373 break; 374 } 375 376 case kWhatGetTrackInfo: 377 { 378 uint32_t replyID; 379 CHECK(msg->senderAwaitsResponse(&replyID)); 380 381 Parcel* reply; 382 CHECK(msg->findPointer("reply", (void**)&reply)); 383 384 size_t inbandTracks = 0; 385 if (mSource != NULL) { 386 inbandTracks = mSource->getTrackCount(); 387 } 388 389 size_t ccTracks = 0; 390 if (mCCDecoder != NULL) { 391 ccTracks = mCCDecoder->getTrackCount(); 392 } 393 394 // total 
track count 395 reply->writeInt32(inbandTracks + ccTracks); 396 397 // write inband tracks 398 for (size_t i = 0; i < inbandTracks; ++i) { 399 writeTrackInfo(reply, mSource->getTrackInfo(i)); 400 } 401 402 // write CC track 403 for (size_t i = 0; i < ccTracks; ++i) { 404 writeTrackInfo(reply, mCCDecoder->getTrackInfo(i)); 405 } 406 407 sp<AMessage> response = new AMessage; 408 response->postReply(replyID); 409 break; 410 } 411 412 case kWhatSelectTrack: 413 { 414 uint32_t replyID; 415 CHECK(msg->senderAwaitsResponse(&replyID)); 416 417 size_t trackIndex; 418 int32_t select; 419 CHECK(msg->findSize("trackIndex", &trackIndex)); 420 CHECK(msg->findInt32("select", &select)); 421 422 status_t err = INVALID_OPERATION; 423 424 size_t inbandTracks = 0; 425 if (mSource != NULL) { 426 inbandTracks = mSource->getTrackCount(); 427 } 428 size_t ccTracks = 0; 429 if (mCCDecoder != NULL) { 430 ccTracks = mCCDecoder->getTrackCount(); 431 } 432 433 if (trackIndex < inbandTracks) { 434 err = mSource->selectTrack(trackIndex, select); 435 436 if (!select && err == OK) { 437 int32_t type; 438 sp<AMessage> info = mSource->getTrackInfo(trackIndex); 439 if (info != NULL 440 && info->findInt32("type", &type) 441 && type == MEDIA_TRACK_TYPE_TIMEDTEXT) { 442 ++mTimedTextGeneration; 443 } 444 } 445 } else { 446 trackIndex -= inbandTracks; 447 448 if (trackIndex < ccTracks) { 449 err = mCCDecoder->selectTrack(trackIndex, select); 450 } 451 } 452 453 sp<AMessage> response = new AMessage; 454 response->setInt32("err", err); 455 456 response->postReply(replyID); 457 break; 458 } 459 460 case kWhatPollDuration: 461 { 462 int32_t generation; 463 CHECK(msg->findInt32("generation", &generation)); 464 465 if (generation != mPollDurationGeneration) { 466 // stale 467 break; 468 } 469 470 int64_t durationUs; 471 if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) { 472 sp<NuPlayerDriver> driver = mDriver.promote(); 473 if (driver != NULL) { 474 driver->notifyDuration(durationUs); 475 } 476 } 
477 478 msg->post(1000000ll); // poll again in a second. 479 break; 480 } 481 482 case kWhatSetVideoNativeWindow: 483 { 484 ALOGV("kWhatSetVideoNativeWindow"); 485 486 mDeferredActions.push_back( 487 new ShutdownDecoderAction( 488 false /* audio */, true /* video */)); 489 490 sp<RefBase> obj; 491 CHECK(msg->findObject("native-window", &obj)); 492 493 mDeferredActions.push_back( 494 new SetSurfaceAction( 495 static_cast<NativeWindowWrapper *>(obj.get()))); 496 497 if (obj != NULL) { 498 // If there is a new surface texture, instantiate decoders 499 // again if possible. 500 mDeferredActions.push_back( 501 new SimpleAction(&NuPlayer::performScanSources)); 502 } 503 504 processDeferredActions(); 505 break; 506 } 507 508 case kWhatSetAudioSink: 509 { 510 ALOGV("kWhatSetAudioSink"); 511 512 sp<RefBase> obj; 513 CHECK(msg->findObject("sink", &obj)); 514 515 mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get()); 516 break; 517 } 518 519 case kWhatStart: 520 { 521 ALOGV("kWhatStart"); 522 523 mVideoIsAVC = false; 524 mOffloadAudio = false; 525 mAudioEOS = false; 526 mVideoEOS = false; 527 mSkipRenderingAudioUntilMediaTimeUs = -1; 528 mSkipRenderingVideoUntilMediaTimeUs = -1; 529 mVideoLateByUs = 0; 530 mNumFramesTotal = 0; 531 mNumFramesDropped = 0; 532 mStarted = true; 533 534 /* instantiate decoders now for secure playback */ 535 if (mSourceFlags & Source::FLAG_SECURE) { 536 if (mNativeWindow != NULL) { 537 instantiateDecoder(false, &mVideoDecoder); 538 } 539 540 if (mAudioSink != NULL) { 541 instantiateDecoder(true, &mAudioDecoder); 542 } 543 } 544 545 mSource->start(); 546 547 uint32_t flags = 0; 548 549 if (mSource->isRealTime()) { 550 flags |= Renderer::FLAG_REAL_TIME; 551 } 552 553 sp<MetaData> audioMeta = mSource->getFormatMeta(true /* audio */); 554 audio_stream_type_t streamType = AUDIO_STREAM_MUSIC; 555 if (mAudioSink != NULL) { 556 streamType = mAudioSink->getAudioStreamType(); 557 } 558 559 sp<AMessage> videoFormat = mSource->getFormat(false /* 
audio */); 560 561 mOffloadAudio = 562 canOffloadStream(audioMeta, (videoFormat != NULL), 563 true /* is_streaming */, streamType); 564 if (mOffloadAudio) { 565 flags |= Renderer::FLAG_OFFLOAD_AUDIO; 566 } 567 568 mRenderer = new Renderer( 569 mAudioSink, 570 new AMessage(kWhatRendererNotify, id()), 571 flags); 572 573 mRendererLooper = new ALooper; 574 mRendererLooper->setName("NuPlayerRenderer"); 575 mRendererLooper->start(false, false, ANDROID_PRIORITY_AUDIO); 576 mRendererLooper->registerHandler(mRenderer); 577 578 postScanSources(); 579 break; 580 } 581 582 case kWhatScanSources: 583 { 584 int32_t generation; 585 CHECK(msg->findInt32("generation", &generation)); 586 if (generation != mScanSourcesGeneration) { 587 // Drop obsolete msg. 588 break; 589 } 590 591 mScanSourcesPending = false; 592 593 ALOGV("scanning sources haveAudio=%d, haveVideo=%d", 594 mAudioDecoder != NULL, mVideoDecoder != NULL); 595 596 bool mHadAnySourcesBefore = 597 (mAudioDecoder != NULL) || (mVideoDecoder != NULL); 598 599 if (mNativeWindow != NULL) { 600 instantiateDecoder(false, &mVideoDecoder); 601 } 602 603 if (mAudioSink != NULL) { 604 instantiateDecoder(true, &mAudioDecoder); 605 } 606 607 if (!mHadAnySourcesBefore 608 && (mAudioDecoder != NULL || mVideoDecoder != NULL)) { 609 // This is the first time we've found anything playable. 610 611 if (mSourceFlags & Source::FLAG_DYNAMIC_DURATION) { 612 schedulePollDuration(); 613 } 614 } 615 616 status_t err; 617 if ((err = mSource->feedMoreTSData()) != OK) { 618 if (mAudioDecoder == NULL && mVideoDecoder == NULL) { 619 // We're not currently decoding anything (no audio or 620 // video tracks found) and we just ran out of input data. 
621 622 if (err == ERROR_END_OF_STREAM) { 623 notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0); 624 } else { 625 notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err); 626 } 627 } 628 break; 629 } 630 631 if ((mAudioDecoder == NULL && mAudioSink != NULL) 632 || (mVideoDecoder == NULL && mNativeWindow != NULL)) { 633 msg->post(100000ll); 634 mScanSourcesPending = true; 635 } 636 break; 637 } 638 639 case kWhatVideoNotify: 640 case kWhatAudioNotify: 641 { 642 bool audio = msg->what() == kWhatAudioNotify; 643 644 int32_t what; 645 CHECK(msg->findInt32("what", &what)); 646 647 if (what == Decoder::kWhatFillThisBuffer) { 648 status_t err = feedDecoderInputData( 649 audio, msg); 650 651 if (err == -EWOULDBLOCK) { 652 if (mSource->feedMoreTSData() == OK) { 653 msg->post(10000ll); 654 } 655 } 656 } else if (what == Decoder::kWhatEOS) { 657 int32_t err; 658 CHECK(msg->findInt32("err", &err)); 659 660 if (err == ERROR_END_OF_STREAM) { 661 ALOGV("got %s decoder EOS", audio ? "audio" : "video"); 662 } else { 663 ALOGV("got %s decoder EOS w/ error %d", 664 audio ? "audio" : "video", 665 err); 666 } 667 668 mRenderer->queueEOS(audio, err); 669 } else if (what == Decoder::kWhatFlushCompleted) { 670 bool needShutdown; 671 672 if (audio) { 673 CHECK(IsFlushingState(mFlushingAudio, &needShutdown)); 674 mFlushingAudio = FLUSHED; 675 } else { 676 CHECK(IsFlushingState(mFlushingVideo, &needShutdown)); 677 mFlushingVideo = FLUSHED; 678 679 mVideoLateByUs = 0; 680 } 681 682 ALOGV("decoder %s flush completed", audio ? "audio" : "video"); 683 684 if (needShutdown) { 685 ALOGV("initiating %s decoder shutdown", 686 audio ? "audio" : "video"); 687 688 (audio ? 
mAudioDecoder : mVideoDecoder)->initiateShutdown(); 689 690 if (audio) { 691 mFlushingAudio = SHUTTING_DOWN_DECODER; 692 } else { 693 mFlushingVideo = SHUTTING_DOWN_DECODER; 694 } 695 } 696 697 finishFlushIfPossible(); 698 } else if (what == Decoder::kWhatOutputFormatChanged) { 699 sp<AMessage> format; 700 CHECK(msg->findMessage("format", &format)); 701 702 if (audio) { 703 int32_t numChannels; 704 CHECK(format->findInt32( 705 "channel-count", &numChannels)); 706 707 int32_t sampleRate; 708 CHECK(format->findInt32("sample-rate", &sampleRate)); 709 710 ALOGV("Audio output format changed to %d Hz, %d channels", 711 sampleRate, numChannels); 712 713 mAudioSink->close(); 714 715 uint32_t flags; 716 int64_t durationUs; 717 // FIXME: we should handle the case where the video decoder 718 // is created after we receive the format change indication. 719 // Current code will just make that we select deep buffer 720 // with video which should not be a problem as it should 721 // not prevent from keeping A/V sync. 
722 if (mVideoDecoder == NULL && 723 mSource->getDuration(&durationUs) == OK && 724 durationUs 725 > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) { 726 flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER; 727 } else { 728 flags = AUDIO_OUTPUT_FLAG_NONE; 729 } 730 731 int32_t channelMask; 732 if (!format->findInt32("channel-mask", &channelMask)) { 733 channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER; 734 } 735 736 if (mOffloadAudio) { 737 audio_format_t audioFormat = AUDIO_FORMAT_PCM_16_BIT; 738 audio_offload_info_t offloadInfo = 739 AUDIO_INFO_INITIALIZER; 740 741 AString mime; 742 CHECK(format->findString("mime", &mime)); 743 744 status_t err = 745 mapMimeToAudioFormat(audioFormat, mime.c_str()); 746 if (err != OK) { 747 ALOGE("Couldn't map mime \"%s\" to a valid " 748 "audio_format", mime.c_str()); 749 mOffloadAudio = false; 750 } else { 751 ALOGV("Mime \"%s\" mapped to audio_format 0x%x", 752 mime.c_str(), audioFormat); 753 754 int32_t aacProfile = -1; 755 if (audioFormat == AUDIO_FORMAT_AAC 756 && format->findInt32("aac-profile", &aacProfile)) { 757 // Redefine AAC format as per aac profile 758 mapAACProfileToAudioFormat( 759 audioFormat, 760 aacProfile); 761 } 762 763 flags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD; 764 765 offloadInfo.duration_us = -1; 766 format->findInt64( 767 "durationUs", &offloadInfo.duration_us); 768 769 int avgBitRate = -1; 770 format->findInt32("bit-rate", &avgBitRate); 771 772 offloadInfo.sample_rate = sampleRate; 773 offloadInfo.channel_mask = channelMask; 774 offloadInfo.format = audioFormat; 775 offloadInfo.stream_type = AUDIO_STREAM_MUSIC; 776 offloadInfo.bit_rate = avgBitRate; 777 offloadInfo.has_video = (mVideoDecoder != NULL); 778 offloadInfo.is_streaming = true; 779 780 ALOGV("try to open AudioSink in offload mode"); 781 err = mAudioSink->open( 782 sampleRate, 783 numChannels, 784 (audio_channel_mask_t)channelMask, 785 audioFormat, 786 8 /* bufferCount */, 787 &NuPlayer::Renderer::AudioSinkCallback, 788 mRenderer.get(), 789 (audio_output_flags_t)flags, 
790 &offloadInfo); 791 792 if (err == OK) { 793 // If the playback is offloaded to h/w, we pass 794 // the HAL some metadata information. 795 // We don't want to do this for PCM because it 796 // will be going through the AudioFlinger mixer 797 // before reaching the hardware. 798 sp<MetaData> audioMeta = 799 mSource->getFormatMeta(true /* audio */); 800 sendMetaDataToHal(mAudioSink, audioMeta); 801 802 err = mAudioSink->start(); 803 } 804 } 805 806 if (err != OK) { 807 // Clean up, fall back to non offload mode. 808 mAudioSink->close(); 809 mAudioDecoder.clear(); 810 mRenderer->signalDisableOffloadAudio(); 811 mOffloadAudio = false; 812 813 instantiateDecoder( 814 true /* audio */, &mAudioDecoder); 815 } 816 } 817 818 if (!mOffloadAudio) { 819 flags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD; 820 ALOGV("open AudioSink in NON-offload mode"); 821 CHECK_EQ(mAudioSink->open( 822 sampleRate, 823 numChannels, 824 (audio_channel_mask_t)channelMask, 825 AUDIO_FORMAT_PCM_16_BIT, 826 8 /* bufferCount */, 827 NULL, 828 NULL, 829 (audio_output_flags_t)flags), 830 (status_t)OK); 831 mAudioSink->start(); 832 } 833 834 mRenderer->signalAudioSinkChanged(); 835 } else { 836 // video 837 838 int32_t width, height; 839 CHECK(format->findInt32("width", &width)); 840 CHECK(format->findInt32("height", &height)); 841 842 int32_t cropLeft, cropTop, cropRight, cropBottom; 843 CHECK(format->findRect( 844 "crop", 845 &cropLeft, &cropTop, &cropRight, &cropBottom)); 846 847 int32_t displayWidth = cropRight - cropLeft + 1; 848 int32_t displayHeight = cropBottom - cropTop + 1; 849 850 ALOGV("Video output format changed to %d x %d " 851 "(crop: %d x %d @ (%d, %d))", 852 width, height, 853 displayWidth, 854 displayHeight, 855 cropLeft, cropTop); 856 857 sp<AMessage> videoInputFormat = 858 mSource->getFormat(false /* audio */); 859 860 // Take into account sample aspect ratio if necessary: 861 int32_t sarWidth, sarHeight; 862 if (videoInputFormat->findInt32("sar-width", &sarWidth) 863 && 
videoInputFormat->findInt32( 864 "sar-height", &sarHeight)) { 865 ALOGV("Sample aspect ratio %d : %d", 866 sarWidth, sarHeight); 867 868 displayWidth = (displayWidth * sarWidth) / sarHeight; 869 870 ALOGV("display dimensions %d x %d", 871 displayWidth, displayHeight); 872 } 873 874 int32_t rotationDegrees; 875 if (!videoInputFormat->findInt32( 876 "rotation-degrees", &rotationDegrees)) { 877 rotationDegrees = 0; 878 } 879 880 if (rotationDegrees == 90 || rotationDegrees == 270) { 881 notifyListener( 882 MEDIA_SET_VIDEO_SIZE, 883 displayHeight, 884 displayWidth); 885 } else { 886 notifyListener( 887 MEDIA_SET_VIDEO_SIZE, 888 displayWidth, 889 displayHeight); 890 } 891 } 892 } else if (what == Decoder::kWhatShutdownCompleted) { 893 ALOGV("%s shutdown completed", audio ? "audio" : "video"); 894 if (audio) { 895 mAudioDecoder.clear(); 896 897 CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER); 898 mFlushingAudio = SHUT_DOWN; 899 } else { 900 mVideoDecoder.clear(); 901 902 CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER); 903 mFlushingVideo = SHUT_DOWN; 904 } 905 906 finishFlushIfPossible(); 907 } else if (what == Decoder::kWhatError) { 908 ALOGE("Received error from %s decoder, aborting playback.", 909 audio ? 
"audio" : "video"); 910 911 mRenderer->queueEOS(audio, UNKNOWN_ERROR); 912 } else if (what == Decoder::kWhatDrainThisBuffer) { 913 renderBuffer(audio, msg); 914 } else { 915 ALOGV("Unhandled decoder notification %d '%c%c%c%c'.", 916 what, 917 what >> 24, 918 (what >> 16) & 0xff, 919 (what >> 8) & 0xff, 920 what & 0xff); 921 } 922 923 break; 924 } 925 926 case kWhatRendererNotify: 927 { 928 int32_t what; 929 CHECK(msg->findInt32("what", &what)); 930 931 if (what == Renderer::kWhatEOS) { 932 int32_t audio; 933 CHECK(msg->findInt32("audio", &audio)); 934 935 int32_t finalResult; 936 CHECK(msg->findInt32("finalResult", &finalResult)); 937 938 if (audio) { 939 mAudioEOS = true; 940 } else { 941 mVideoEOS = true; 942 } 943 944 if (finalResult == ERROR_END_OF_STREAM) { 945 ALOGV("reached %s EOS", audio ? "audio" : "video"); 946 } else { 947 ALOGE("%s track encountered an error (%d)", 948 audio ? "audio" : "video", finalResult); 949 950 notifyListener( 951 MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, finalResult); 952 } 953 954 if ((mAudioEOS || mAudioDecoder == NULL) 955 && (mVideoEOS || mVideoDecoder == NULL)) { 956 notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0); 957 } 958 } else if (what == Renderer::kWhatPosition) { 959 int64_t positionUs; 960 CHECK(msg->findInt64("positionUs", &positionUs)); 961 962 CHECK(msg->findInt64("videoLateByUs", &mVideoLateByUs)); 963 964 if (mDriver != NULL) { 965 sp<NuPlayerDriver> driver = mDriver.promote(); 966 if (driver != NULL) { 967 driver->notifyPosition(positionUs); 968 969 driver->notifyFrameStats( 970 mNumFramesTotal, mNumFramesDropped); 971 } 972 } 973 } else if (what == Renderer::kWhatFlushComplete) { 974 int32_t audio; 975 CHECK(msg->findInt32("audio", &audio)); 976 977 ALOGV("renderer %s flush completed.", audio ? 
"audio" : "video"); 978 } else if (what == Renderer::kWhatVideoRenderingStart) { 979 notifyListener(MEDIA_INFO, MEDIA_INFO_RENDERING_START, 0); 980 } else if (what == Renderer::kWhatMediaRenderingStart) { 981 ALOGV("media rendering started"); 982 notifyListener(MEDIA_STARTED, 0, 0); 983 } else if (what == Renderer::kWhatAudioOffloadTearDown) { 984 ALOGV("Tear down audio offload, fall back to s/w path"); 985 int64_t positionUs; 986 CHECK(msg->findInt64("positionUs", &positionUs)); 987 mAudioSink->close(); 988 mAudioDecoder.clear(); 989 mRenderer->flush(true /* audio */); 990 if (mVideoDecoder != NULL) { 991 mRenderer->flush(false /* audio */); 992 } 993 mRenderer->signalDisableOffloadAudio(); 994 mOffloadAudio = false; 995 996 performSeek(positionUs); 997 instantiateDecoder(true /* audio */, &mAudioDecoder); 998 } 999 break; 1000 } 1001 1002 case kWhatMoreDataQueued: 1003 { 1004 break; 1005 } 1006 1007 case kWhatReset: 1008 { 1009 ALOGV("kWhatReset"); 1010 1011 mDeferredActions.push_back( 1012 new ShutdownDecoderAction( 1013 true /* audio */, true /* video */)); 1014 1015 mDeferredActions.push_back( 1016 new SimpleAction(&NuPlayer::performReset)); 1017 1018 processDeferredActions(); 1019 break; 1020 } 1021 1022 case kWhatSeek: 1023 { 1024 int64_t seekTimeUs; 1025 CHECK(msg->findInt64("seekTimeUs", &seekTimeUs)); 1026 1027 ALOGV("kWhatSeek seekTimeUs=%lld us", seekTimeUs); 1028 1029 mDeferredActions.push_back( 1030 new SimpleAction(&NuPlayer::performDecoderFlush)); 1031 1032 mDeferredActions.push_back(new SeekAction(seekTimeUs)); 1033 1034 processDeferredActions(); 1035 break; 1036 } 1037 1038 case kWhatPause: 1039 { 1040 CHECK(mRenderer != NULL); 1041 mSource->pause(); 1042 mRenderer->pause(); 1043 break; 1044 } 1045 1046 case kWhatResume: 1047 { 1048 CHECK(mRenderer != NULL); 1049 mSource->resume(); 1050 mRenderer->resume(); 1051 break; 1052 } 1053 1054 case kWhatSourceNotify: 1055 { 1056 onSourceNotify(msg); 1057 break; 1058 } 1059 1060 case 
kWhatClosedCaptionNotify: 1061 { 1062 onClosedCaptionNotify(msg); 1063 break; 1064 } 1065 1066 default: 1067 TRESPASS(); 1068 break; 1069 } 1070} 1071 1072void NuPlayer::finishFlushIfPossible() { 1073 if (mFlushingAudio != FLUSHED && mFlushingAudio != SHUT_DOWN) { 1074 return; 1075 } 1076 1077 if (mFlushingVideo != FLUSHED && mFlushingVideo != SHUT_DOWN) { 1078 return; 1079 } 1080 1081 ALOGV("both audio and video are flushed now."); 1082 1083 if (mTimeDiscontinuityPending) { 1084 mRenderer->signalTimeDiscontinuity(); 1085 mTimeDiscontinuityPending = false; 1086 } 1087 1088 if (mAudioDecoder != NULL) { 1089 mAudioDecoder->signalResume(); 1090 } 1091 1092 if (mVideoDecoder != NULL) { 1093 mVideoDecoder->signalResume(); 1094 } 1095 1096 mFlushingAudio = NONE; 1097 mFlushingVideo = NONE; 1098 1099 processDeferredActions(); 1100} 1101 1102void NuPlayer::postScanSources() { 1103 if (mScanSourcesPending) { 1104 return; 1105 } 1106 1107 sp<AMessage> msg = new AMessage(kWhatScanSources, id()); 1108 msg->setInt32("generation", mScanSourcesGeneration); 1109 msg->post(); 1110 1111 mScanSourcesPending = true; 1112} 1113 1114status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) { 1115 if (*decoder != NULL) { 1116 return OK; 1117 } 1118 1119 sp<AMessage> format = mSource->getFormat(audio); 1120 1121 if (format == NULL) { 1122 return -EWOULDBLOCK; 1123 } 1124 1125 if (!audio) { 1126 AString mime; 1127 CHECK(format->findString("mime", &mime)); 1128 mVideoIsAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime.c_str()); 1129 1130 sp<AMessage> ccNotify = new AMessage(kWhatClosedCaptionNotify, id()); 1131 mCCDecoder = new CCDecoder(ccNotify); 1132 1133 if (mSourceFlags & Source::FLAG_SECURE) { 1134 format->setInt32("secure", true); 1135 } 1136 } 1137 1138 sp<AMessage> notify = 1139 new AMessage(audio ? 
kWhatAudioNotify : kWhatVideoNotify, 1140 id()); 1141 1142 if (audio) { 1143 if (mOffloadAudio) { 1144 *decoder = new DecoderPassThrough(notify); 1145 } else { 1146 *decoder = new Decoder(notify); 1147 } 1148 } else { 1149 *decoder = new Decoder(notify, mNativeWindow); 1150 } 1151 (*decoder)->init(); 1152 (*decoder)->configure(format); 1153 1154 // allocate buffers to decrypt widevine source buffers 1155 if (!audio && (mSourceFlags & Source::FLAG_SECURE)) { 1156 Vector<sp<ABuffer> > inputBufs; 1157 CHECK_EQ((*decoder)->getInputBuffers(&inputBufs), (status_t)OK); 1158 1159 Vector<MediaBuffer *> mediaBufs; 1160 for (size_t i = 0; i < inputBufs.size(); i++) { 1161 const sp<ABuffer> &buffer = inputBufs[i]; 1162 MediaBuffer *mbuf = new MediaBuffer(buffer->data(), buffer->size()); 1163 mediaBufs.push(mbuf); 1164 } 1165 1166 status_t err = mSource->setBuffers(audio, mediaBufs); 1167 if (err != OK) { 1168 for (size_t i = 0; i < mediaBufs.size(); ++i) { 1169 mediaBufs[i]->release(); 1170 } 1171 mediaBufs.clear(); 1172 ALOGE("Secure source didn't support secure mediaBufs."); 1173 return err; 1174 } 1175 } 1176 return OK; 1177} 1178 1179status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) { 1180 sp<AMessage> reply; 1181 CHECK(msg->findMessage("reply", &reply)); 1182 1183 if ((audio && mFlushingAudio != NONE 1184 && mFlushingAudio != AWAITING_DISCONTINUITY) 1185 || (!audio && mFlushingVideo != NONE 1186 && mFlushingVideo != AWAITING_DISCONTINUITY)) { 1187 return -EWOULDBLOCK; 1188 } 1189 1190 sp<ABuffer> accessUnit; 1191 1192 bool dropAccessUnit; 1193 do { 1194 status_t err = mSource->dequeueAccessUnit(audio, &accessUnit); 1195 1196 if (err == -EWOULDBLOCK) { 1197 return err; 1198 } else if (err != OK) { 1199 if (err == INFO_DISCONTINUITY) { 1200 int32_t type; 1201 CHECK(accessUnit->meta()->findInt32("discontinuity", &type)); 1202 1203 bool formatChange = 1204 (audio && 1205 (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT)) 1206 || (!audio && 1207 (type & 
ATSParser::DISCONTINUITY_VIDEO_FORMAT)); 1208 1209 bool timeChange = (type & ATSParser::DISCONTINUITY_TIME) != 0; 1210 1211 ALOGI("%s discontinuity (formatChange=%d, time=%d)", 1212 audio ? "audio" : "video", formatChange, timeChange); 1213 1214 if (audio) { 1215 mSkipRenderingAudioUntilMediaTimeUs = -1; 1216 } else { 1217 mSkipRenderingVideoUntilMediaTimeUs = -1; 1218 } 1219 1220 if (timeChange) { 1221 sp<AMessage> extra; 1222 if (accessUnit->meta()->findMessage("extra", &extra) 1223 && extra != NULL) { 1224 int64_t resumeAtMediaTimeUs; 1225 if (extra->findInt64( 1226 "resume-at-mediatimeUs", &resumeAtMediaTimeUs)) { 1227 ALOGI("suppressing rendering of %s until %lld us", 1228 audio ? "audio" : "video", resumeAtMediaTimeUs); 1229 1230 if (audio) { 1231 mSkipRenderingAudioUntilMediaTimeUs = 1232 resumeAtMediaTimeUs; 1233 } else { 1234 mSkipRenderingVideoUntilMediaTimeUs = 1235 resumeAtMediaTimeUs; 1236 } 1237 } 1238 } 1239 } 1240 1241 mTimeDiscontinuityPending = 1242 mTimeDiscontinuityPending || timeChange; 1243 1244 if (mFlushingAudio == NONE && mFlushingVideo == NONE) { 1245 // And we'll resume scanning sources once we're done 1246 // flushing. 1247 mDeferredActions.push_front( 1248 new SimpleAction( 1249 &NuPlayer::performScanSources)); 1250 } 1251 1252 if (formatChange || timeChange) { 1253 1254 sp<AMessage> newFormat = mSource->getFormat(audio); 1255 sp<Decoder> &decoder = audio ? 
mAudioDecoder : mVideoDecoder; 1256 if (formatChange && !decoder->supportsSeamlessFormatChange(newFormat)) { 1257 flushDecoder(audio, /* needShutdown = */ true); 1258 } else { 1259 flushDecoder(audio, /* needShutdown = */ false); 1260 err = OK; 1261 } 1262 } else { 1263 // This stream is unaffected by the discontinuity 1264 1265 if (audio) { 1266 mFlushingAudio = FLUSHED; 1267 } else { 1268 mFlushingVideo = FLUSHED; 1269 } 1270 1271 finishFlushIfPossible(); 1272 1273 return -EWOULDBLOCK; 1274 } 1275 } 1276 1277 reply->setInt32("err", err); 1278 reply->post(); 1279 return OK; 1280 } 1281 1282 if (!audio) { 1283 ++mNumFramesTotal; 1284 } 1285 1286 dropAccessUnit = false; 1287 if (!audio 1288 && !(mSourceFlags & Source::FLAG_SECURE) 1289 && mVideoLateByUs > 100000ll 1290 && mVideoIsAVC 1291 && !IsAVCReferenceFrame(accessUnit)) { 1292 dropAccessUnit = true; 1293 ++mNumFramesDropped; 1294 } 1295 } while (dropAccessUnit); 1296 1297 // ALOGV("returned a valid buffer of %s data", audio ? "audio" : "video"); 1298 1299#if 0 1300 int64_t mediaTimeUs; 1301 CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs)); 1302 ALOGV("feeding %s input buffer at media time %.2f secs", 1303 audio ? "audio" : "video", 1304 mediaTimeUs / 1E6); 1305#endif 1306 1307 if (!audio) { 1308 mCCDecoder->decode(accessUnit); 1309 } 1310 1311 reply->setBuffer("buffer", accessUnit); 1312 reply->post(); 1313 1314 return OK; 1315} 1316 1317void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) { 1318 // ALOGV("renderBuffer %s", audio ? "audio" : "video"); 1319 1320 sp<AMessage> reply; 1321 CHECK(msg->findMessage("reply", &reply)); 1322 1323 if (IsFlushingState(audio ? mFlushingAudio : mFlushingVideo)) { 1324 // We're currently attempting to flush the decoder, in order 1325 // to complete this, the decoder wants all its buffers back, 1326 // so we don't want any output buffers it sent us (from before 1327 // we initiated the flush) to be stuck in the renderer's queue. 
1328 1329 ALOGV("we're still flushing the %s decoder, sending its output buffer" 1330 " right back.", audio ? "audio" : "video"); 1331 1332 reply->post(); 1333 return; 1334 } 1335 1336 sp<ABuffer> buffer; 1337 CHECK(msg->findBuffer("buffer", &buffer)); 1338 1339 int64_t mediaTimeUs; 1340 CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs)); 1341 1342 int64_t &skipUntilMediaTimeUs = 1343 audio 1344 ? mSkipRenderingAudioUntilMediaTimeUs 1345 : mSkipRenderingVideoUntilMediaTimeUs; 1346 1347 if (skipUntilMediaTimeUs >= 0) { 1348 1349 if (mediaTimeUs < skipUntilMediaTimeUs) { 1350 ALOGV("dropping %s buffer at time %lld as requested.", 1351 audio ? "audio" : "video", 1352 mediaTimeUs); 1353 1354 reply->post(); 1355 return; 1356 } 1357 1358 skipUntilMediaTimeUs = -1; 1359 } 1360 1361 if (!audio && mCCDecoder->isSelected()) { 1362 mCCDecoder->display(mediaTimeUs); 1363 } 1364 1365 mRenderer->queueBuffer(audio, buffer, reply); 1366} 1367 1368void NuPlayer::notifyListener(int msg, int ext1, int ext2, const Parcel *in) { 1369 if (mDriver == NULL) { 1370 return; 1371 } 1372 1373 sp<NuPlayerDriver> driver = mDriver.promote(); 1374 1375 if (driver == NULL) { 1376 return; 1377 } 1378 1379 driver->notifyListener(msg, ext1, ext2, in); 1380} 1381 1382void NuPlayer::flushDecoder(bool audio, bool needShutdown) { 1383 ALOGV("[%s] flushDecoder needShutdown=%d", 1384 audio ? "audio" : "video", needShutdown); 1385 1386 if ((audio && mAudioDecoder == NULL) || (!audio && mVideoDecoder == NULL)) { 1387 ALOGI("flushDecoder %s without decoder present", 1388 audio ? "audio" : "video"); 1389 } 1390 1391 // Make sure we don't continue to scan sources until we finish flushing. 1392 ++mScanSourcesGeneration; 1393 mScanSourcesPending = false; 1394 1395 (audio ? mAudioDecoder : mVideoDecoder)->signalFlush(); 1396 mRenderer->flush(audio); 1397 1398 FlushStatus newStatus = 1399 needShutdown ? 
FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER; 1400 1401 if (audio) { 1402 CHECK(mFlushingAudio == NONE 1403 || mFlushingAudio == AWAITING_DISCONTINUITY); 1404 1405 mFlushingAudio = newStatus; 1406 1407 if (mFlushingVideo == NONE) { 1408 mFlushingVideo = (mVideoDecoder != NULL) 1409 ? AWAITING_DISCONTINUITY 1410 : FLUSHED; 1411 } 1412 } else { 1413 CHECK(mFlushingVideo == NONE 1414 || mFlushingVideo == AWAITING_DISCONTINUITY); 1415 1416 mFlushingVideo = newStatus; 1417 1418 if (mFlushingAudio == NONE) { 1419 mFlushingAudio = (mAudioDecoder != NULL) 1420 ? AWAITING_DISCONTINUITY 1421 : FLUSHED; 1422 } 1423 } 1424} 1425 1426sp<AMessage> NuPlayer::Source::getFormat(bool audio) { 1427 sp<MetaData> meta = getFormatMeta(audio); 1428 1429 if (meta == NULL) { 1430 return NULL; 1431 } 1432 1433 sp<AMessage> msg = new AMessage; 1434 1435 if(convertMetaDataToMessage(meta, &msg) == OK) { 1436 return msg; 1437 } 1438 return NULL; 1439} 1440 1441status_t NuPlayer::setVideoScalingMode(int32_t mode) { 1442 mVideoScalingMode = mode; 1443 if (mNativeWindow != NULL) { 1444 status_t ret = native_window_set_scaling_mode( 1445 mNativeWindow->getNativeWindow().get(), mVideoScalingMode); 1446 if (ret != OK) { 1447 ALOGE("Failed to set scaling mode (%d): %s", 1448 -ret, strerror(-ret)); 1449 return ret; 1450 } 1451 } 1452 return OK; 1453} 1454 1455status_t NuPlayer::getTrackInfo(Parcel* reply) const { 1456 sp<AMessage> msg = new AMessage(kWhatGetTrackInfo, id()); 1457 msg->setPointer("reply", reply); 1458 1459 sp<AMessage> response; 1460 status_t err = msg->postAndAwaitResponse(&response); 1461 return err; 1462} 1463 1464status_t NuPlayer::selectTrack(size_t trackIndex, bool select) { 1465 sp<AMessage> msg = new AMessage(kWhatSelectTrack, id()); 1466 msg->setSize("trackIndex", trackIndex); 1467 msg->setInt32("select", select); 1468 1469 sp<AMessage> response; 1470 status_t err = msg->postAndAwaitResponse(&response); 1471 1472 if (err != OK) { 1473 return err; 1474 } 1475 1476 if 
(!response->findInt32("err", &err)) { 1477 err = OK; 1478 } 1479 1480 return err; 1481} 1482 1483void NuPlayer::schedulePollDuration() { 1484 sp<AMessage> msg = new AMessage(kWhatPollDuration, id()); 1485 msg->setInt32("generation", mPollDurationGeneration); 1486 msg->post(); 1487} 1488 1489void NuPlayer::cancelPollDuration() { 1490 ++mPollDurationGeneration; 1491} 1492 1493void NuPlayer::processDeferredActions() { 1494 while (!mDeferredActions.empty()) { 1495 // We won't execute any deferred actions until we're no longer in 1496 // an intermediate state, i.e. one more more decoders are currently 1497 // flushing or shutting down. 1498 1499 if (mRenderer != NULL) { 1500 // There's an edge case where the renderer owns all output 1501 // buffers and is paused, therefore the decoder will not read 1502 // more input data and will never encounter the matching 1503 // discontinuity. To avoid this, we resume the renderer. 1504 1505 if (mFlushingAudio == AWAITING_DISCONTINUITY 1506 || mFlushingVideo == AWAITING_DISCONTINUITY) { 1507 mRenderer->resume(); 1508 } 1509 } 1510 1511 if (mFlushingAudio != NONE || mFlushingVideo != NONE) { 1512 // We're currently flushing, postpone the reset until that's 1513 // completed. 
1514 1515 ALOGV("postponing action mFlushingAudio=%d, mFlushingVideo=%d", 1516 mFlushingAudio, mFlushingVideo); 1517 1518 break; 1519 } 1520 1521 sp<Action> action = *mDeferredActions.begin(); 1522 mDeferredActions.erase(mDeferredActions.begin()); 1523 1524 action->execute(this); 1525 } 1526} 1527 1528void NuPlayer::performSeek(int64_t seekTimeUs) { 1529 ALOGV("performSeek seekTimeUs=%lld us (%.2f secs)", 1530 seekTimeUs, 1531 seekTimeUs / 1E6); 1532 1533 mSource->seekTo(seekTimeUs); 1534 ++mTimedTextGeneration; 1535 1536 if (mDriver != NULL) { 1537 sp<NuPlayerDriver> driver = mDriver.promote(); 1538 if (driver != NULL) { 1539 driver->notifyPosition(seekTimeUs); 1540 driver->notifySeekComplete(); 1541 } 1542 } 1543 1544 // everything's flushed, continue playback. 1545} 1546 1547void NuPlayer::performDecoderFlush() { 1548 ALOGV("performDecoderFlush"); 1549 1550 if (mAudioDecoder == NULL && mVideoDecoder == NULL) { 1551 return; 1552 } 1553 1554 mTimeDiscontinuityPending = true; 1555 1556 if (mAudioDecoder != NULL) { 1557 flushDecoder(true /* audio */, false /* needShutdown */); 1558 } 1559 1560 if (mVideoDecoder != NULL) { 1561 flushDecoder(false /* audio */, false /* needShutdown */); 1562 } 1563} 1564 1565void NuPlayer::performDecoderShutdown(bool audio, bool video) { 1566 ALOGV("performDecoderShutdown audio=%d, video=%d", audio, video); 1567 1568 if ((!audio || mAudioDecoder == NULL) 1569 && (!video || mVideoDecoder == NULL)) { 1570 return; 1571 } 1572 1573 mTimeDiscontinuityPending = true; 1574 1575 if (mFlushingAudio == NONE && (!audio || mAudioDecoder == NULL)) { 1576 mFlushingAudio = FLUSHED; 1577 } 1578 1579 if (mFlushingVideo == NONE && (!video || mVideoDecoder == NULL)) { 1580 mFlushingVideo = FLUSHED; 1581 } 1582 1583 if (audio && mAudioDecoder != NULL) { 1584 flushDecoder(true /* audio */, true /* needShutdown */); 1585 } 1586 1587 if (video && mVideoDecoder != NULL) { 1588 flushDecoder(false /* audio */, true /* needShutdown */); 1589 } 1590} 1591 
1592void NuPlayer::performReset() { 1593 ALOGV("performReset"); 1594 1595 CHECK(mAudioDecoder == NULL); 1596 CHECK(mVideoDecoder == NULL); 1597 1598 cancelPollDuration(); 1599 1600 ++mScanSourcesGeneration; 1601 mScanSourcesPending = false; 1602 1603 if (mRendererLooper != NULL) { 1604 if (mRenderer != NULL) { 1605 mRendererLooper->unregisterHandler(mRenderer->id()); 1606 } 1607 mRendererLooper->stop(); 1608 mRendererLooper.clear(); 1609 } 1610 mRenderer.clear(); 1611 1612 if (mSource != NULL) { 1613 mSource->stop(); 1614 1615 looper()->unregisterHandler(mSource->id()); 1616 1617 mSource.clear(); 1618 } 1619 1620 if (mDriver != NULL) { 1621 sp<NuPlayerDriver> driver = mDriver.promote(); 1622 if (driver != NULL) { 1623 driver->notifyResetComplete(); 1624 } 1625 } 1626 1627 mStarted = false; 1628} 1629 1630void NuPlayer::performScanSources() { 1631 ALOGV("performScanSources"); 1632 1633 if (!mStarted) { 1634 return; 1635 } 1636 1637 if (mAudioDecoder == NULL || mVideoDecoder == NULL) { 1638 postScanSources(); 1639 } 1640} 1641 1642void NuPlayer::performSetSurface(const sp<NativeWindowWrapper> &wrapper) { 1643 ALOGV("performSetSurface"); 1644 1645 mNativeWindow = wrapper; 1646 1647 // XXX - ignore error from setVideoScalingMode for now 1648 setVideoScalingMode(mVideoScalingMode); 1649 1650 if (mDriver != NULL) { 1651 sp<NuPlayerDriver> driver = mDriver.promote(); 1652 if (driver != NULL) { 1653 driver->notifySetSurfaceComplete(); 1654 } 1655 } 1656} 1657 1658void NuPlayer::onSourceNotify(const sp<AMessage> &msg) { 1659 int32_t what; 1660 CHECK(msg->findInt32("what", &what)); 1661 1662 switch (what) { 1663 case Source::kWhatPrepared: 1664 { 1665 if (mSource == NULL) { 1666 // This is a stale notification from a source that was 1667 // asynchronously preparing when the client called reset(). 1668 // We handled the reset, the source is gone. 
1669 break; 1670 } 1671 1672 int32_t err; 1673 CHECK(msg->findInt32("err", &err)); 1674 1675 sp<NuPlayerDriver> driver = mDriver.promote(); 1676 if (driver != NULL) { 1677 // notify duration first, so that it's definitely set when 1678 // the app received the "prepare complete" callback. 1679 int64_t durationUs; 1680 if (mSource->getDuration(&durationUs) == OK) { 1681 driver->notifyDuration(durationUs); 1682 } 1683 driver->notifyPrepareCompleted(err); 1684 } 1685 1686 break; 1687 } 1688 1689 case Source::kWhatFlagsChanged: 1690 { 1691 uint32_t flags; 1692 CHECK(msg->findInt32("flags", (int32_t *)&flags)); 1693 1694 sp<NuPlayerDriver> driver = mDriver.promote(); 1695 if (driver != NULL) { 1696 driver->notifyFlagsChanged(flags); 1697 } 1698 1699 if ((mSourceFlags & Source::FLAG_DYNAMIC_DURATION) 1700 && (!(flags & Source::FLAG_DYNAMIC_DURATION))) { 1701 cancelPollDuration(); 1702 } else if (!(mSourceFlags & Source::FLAG_DYNAMIC_DURATION) 1703 && (flags & Source::FLAG_DYNAMIC_DURATION) 1704 && (mAudioDecoder != NULL || mVideoDecoder != NULL)) { 1705 schedulePollDuration(); 1706 } 1707 1708 mSourceFlags = flags; 1709 break; 1710 } 1711 1712 case Source::kWhatVideoSizeChanged: 1713 { 1714 int32_t width, height; 1715 CHECK(msg->findInt32("width", &width)); 1716 CHECK(msg->findInt32("height", &height)); 1717 1718 notifyListener(MEDIA_SET_VIDEO_SIZE, width, height); 1719 break; 1720 } 1721 1722 case Source::kWhatBufferingStart: 1723 { 1724 notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_START, 0); 1725 break; 1726 } 1727 1728 case Source::kWhatBufferingEnd: 1729 { 1730 notifyListener(MEDIA_INFO, MEDIA_INFO_BUFFERING_END, 0); 1731 break; 1732 } 1733 1734 case Source::kWhatSubtitleData: 1735 { 1736 sp<ABuffer> buffer; 1737 CHECK(msg->findBuffer("buffer", &buffer)); 1738 1739 sendSubtitleData(buffer, 0 /* baseIndex */); 1740 break; 1741 } 1742 1743 case Source::kWhatTimedTextData: 1744 { 1745 int32_t generation; 1746 if (msg->findInt32("generation", &generation) 1747 && 
generation != mTimedTextGeneration) { 1748 break; 1749 } 1750 1751 sp<ABuffer> buffer; 1752 CHECK(msg->findBuffer("buffer", &buffer)); 1753 1754 sp<NuPlayerDriver> driver = mDriver.promote(); 1755 if (driver == NULL) { 1756 break; 1757 } 1758 1759 int posMs; 1760 int64_t timeUs, posUs; 1761 driver->getCurrentPosition(&posMs); 1762 posUs = posMs * 1000; 1763 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 1764 1765 if (posUs < timeUs) { 1766 if (!msg->findInt32("generation", &generation)) { 1767 msg->setInt32("generation", mTimedTextGeneration); 1768 } 1769 msg->post(timeUs - posUs); 1770 } else { 1771 sendTimedTextData(buffer); 1772 } 1773 break; 1774 } 1775 1776 case Source::kWhatQueueDecoderShutdown: 1777 { 1778 int32_t audio, video; 1779 CHECK(msg->findInt32("audio", &audio)); 1780 CHECK(msg->findInt32("video", &video)); 1781 1782 sp<AMessage> reply; 1783 CHECK(msg->findMessage("reply", &reply)); 1784 1785 queueDecoderShutdown(audio, video, reply); 1786 break; 1787 } 1788 1789 default: 1790 TRESPASS(); 1791 } 1792} 1793 1794void NuPlayer::onClosedCaptionNotify(const sp<AMessage> &msg) { 1795 int32_t what; 1796 CHECK(msg->findInt32("what", &what)); 1797 1798 switch (what) { 1799 case NuPlayer::CCDecoder::kWhatClosedCaptionData: 1800 { 1801 sp<ABuffer> buffer; 1802 CHECK(msg->findBuffer("buffer", &buffer)); 1803 1804 size_t inbandTracks = 0; 1805 if (mSource != NULL) { 1806 inbandTracks = mSource->getTrackCount(); 1807 } 1808 1809 sendSubtitleData(buffer, inbandTracks); 1810 break; 1811 } 1812 1813 case NuPlayer::CCDecoder::kWhatTrackAdded: 1814 { 1815 notifyListener(MEDIA_INFO, MEDIA_INFO_METADATA_UPDATE, 0); 1816 1817 break; 1818 } 1819 1820 default: 1821 TRESPASS(); 1822 } 1823 1824 1825} 1826 1827void NuPlayer::sendSubtitleData(const sp<ABuffer> &buffer, int32_t baseIndex) { 1828 int32_t trackIndex; 1829 int64_t timeUs, durationUs; 1830 CHECK(buffer->meta()->findInt32("trackIndex", &trackIndex)); 1831 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 
1832 CHECK(buffer->meta()->findInt64("durationUs", &durationUs)); 1833 1834 Parcel in; 1835 in.writeInt32(trackIndex + baseIndex); 1836 in.writeInt64(timeUs); 1837 in.writeInt64(durationUs); 1838 in.writeInt32(buffer->size()); 1839 in.writeInt32(buffer->size()); 1840 in.write(buffer->data(), buffer->size()); 1841 1842 notifyListener(MEDIA_SUBTITLE_DATA, 0, 0, &in); 1843} 1844 1845void NuPlayer::sendTimedTextData(const sp<ABuffer> &buffer) { 1846 const void *data; 1847 size_t size = 0; 1848 int64_t timeUs; 1849 int32_t flag = TextDescriptions::LOCAL_DESCRIPTIONS; 1850 1851 AString mime; 1852 CHECK(buffer->meta()->findString("mime", &mime)); 1853 CHECK(strcasecmp(mime.c_str(), MEDIA_MIMETYPE_TEXT_3GPP) == 0); 1854 1855 data = buffer->data(); 1856 size = buffer->size(); 1857 1858 Parcel parcel; 1859 if (size > 0) { 1860 CHECK(buffer->meta()->findInt64("timeUs", &timeUs)); 1861 flag |= TextDescriptions::IN_BAND_TEXT_3GPP; 1862 TextDescriptions::getParcelOfDescriptions( 1863 (const uint8_t *)data, size, flag, timeUs / 1000, &parcel); 1864 } 1865 1866 if ((parcel.dataSize() > 0)) { 1867 notifyListener(MEDIA_TIMED_TEXT, 0, 0, &parcel); 1868 } else { // send an empty timed text 1869 notifyListener(MEDIA_TIMED_TEXT, 0, 0); 1870 } 1871} 1872//////////////////////////////////////////////////////////////////////////////// 1873 1874void NuPlayer::Source::notifyFlagsChanged(uint32_t flags) { 1875 sp<AMessage> notify = dupNotify(); 1876 notify->setInt32("what", kWhatFlagsChanged); 1877 notify->setInt32("flags", flags); 1878 notify->post(); 1879} 1880 1881void NuPlayer::Source::notifyVideoSizeChanged(int32_t width, int32_t height) { 1882 sp<AMessage> notify = dupNotify(); 1883 notify->setInt32("what", kWhatVideoSizeChanged); 1884 notify->setInt32("width", width); 1885 notify->setInt32("height", height); 1886 notify->post(); 1887} 1888 1889void NuPlayer::Source::notifyPrepared(status_t err) { 1890 sp<AMessage> notify = dupNotify(); 1891 notify->setInt32("what", kWhatPrepared); 1892 
notify->setInt32("err", err); 1893 notify->post(); 1894} 1895 1896void NuPlayer::Source::onMessageReceived(const sp<AMessage> & /* msg */) { 1897 TRESPASS(); 1898} 1899 1900void NuPlayer::queueDecoderShutdown( 1901 bool audio, bool video, const sp<AMessage> &reply) { 1902 ALOGI("queueDecoderShutdown audio=%d, video=%d", audio, video); 1903 1904 mDeferredActions.push_back( 1905 new ShutdownDecoderAction(audio, video)); 1906 1907 mDeferredActions.push_back( 1908 new SimpleAction(&NuPlayer::performScanSources)); 1909 1910 mDeferredActions.push_back(new PostMessageAction(reply)); 1911 1912 processDeferredActions(); 1913} 1914 1915} // namespace android 1916