// NuPlayer.cpp revision 8b71241ce7353731ab75322c46e090ee35014a33
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayer"
#include <utils/Log.h>

#include "NuPlayer.h"

#include "HTTPLiveSource.h"
#include "NuPlayerDecoder.h"
#include "NuPlayerDriver.h"
#include "NuPlayerRenderer.h"
#include "NuPlayerSource.h"
#include "RTSPSource.h"
#include "StreamingSource.h"
#include "GenericSource.h"

#include "ATSParser.h"

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <gui/ISurfaceTexture.h>

#include "avc_utils.h"

namespace android {

////////////////////////////////////////////////////////////////////////////////

// NuPlayer drives playback by exchanging AMessages between its Source, its
// audio/video Decoders and its Renderer.  Most mutable state below is only
// touched from onMessageReceived(), i.e. on the looper thread this handler is
// registered with; the public entry points mostly just post messages.
// NOTE(review): setUID()/setDriver() mutate state directly — presumably they
// are only called before start(); confirm with the callers in NuPlayerDriver.
NuPlayer::NuPlayer()
    : mUIDValid(false),
      mVideoIsAVC(false),
      mAudioEOS(false),
      mVideoEOS(false),
      mScanSourcesPending(false),
      mScanSourcesGeneration(0),
      mTimeDiscontinuityPending(false),
      mFlushingAudio(NONE),
      mFlushingVideo(NONE),
      mResetInProgress(false),
      mResetPostponed(false),
      mSkipRenderingAudioUntilMediaTimeUs(-1ll),
      mSkipRenderingVideoUntilMediaTimeUs(-1ll),
      mVideoLateByUs(0ll),
      mNumFramesTotal(0ll),
      mNumFramesDropped(0ll) {
}

NuPlayer::~NuPlayer() {
}

// Records the uid playback should be attributed to (forwarded to the sources
// created in setDataSource below).
void NuPlayer::setUID(uid_t uid) {
    mUIDValid = true;
    mUID = uid;
}

// The driver is held weakly to avoid a reference cycle; every notification
// promotes it first and silently drops the event if the driver is gone.
void NuPlayer::setDriver(const wp<NuPlayerDriver> &driver) {
    mDriver = driver;
}

// Data source variant for pre-demuxed streaming input (e.g. a TS stream
// pushed by the app).  Actual assignment happens on the looper thread in
// kWhatSetDataSource.
void NuPlayer::setDataSource(const sp<IStreamSource> &source) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    msg->setObject("source", new StreamingSource(source));
    msg->post();
}

// Heuristic: treat any http(s) URL that ends in ".m3u8" — or merely contains
// the substring "m3u8" anywhere (query strings, redirectors, etc.) — as an
// HTTP Live Streaming playlist.  The substring test is deliberately broad and
// can misclassify unrelated URLs containing "m3u8".
static bool IsHTTPLiveURL(const char *url) {
    if (!strncasecmp("http://", url, 7)
            || !strncasecmp("https://", url, 8)) {
        size_t len = strlen(url);
        if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
            return true;
        }

        if (strstr(url,"m3u8")) {
            return true;
        }
    }

    return false;
}

// Data source variant for URLs: picks an HLS, RTSP or generic source based on
// the URL shape, then hands it to the looper thread.
void NuPlayer::setDataSource(
        const char *url, const KeyedVector<String8, String8> *headers) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    sp<Source> source;
    if (IsHTTPLiveURL(url)) {
        source = new HTTPLiveSource(url, headers, mUIDValid, mUID);
    } else if (!strncasecmp(url, "rtsp://", 7)) {
        source = new RTSPSource(url, headers, mUIDValid, mUID);
    } else {
        source = new GenericSource(url, headers, mUIDValid, mUID);
    }

    msg->setObject("source", source);
    msg->post();
}

// Data source variant for an already-open file descriptor window.
void NuPlayer::setDataSource(int fd, int64_t offset, int64_t length) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    sp<Source> source = new GenericSource(fd, offset, length);
    msg->setObject("source", source);
    msg->post();
}

// Wraps the surface texture (may be NULL to detach) in a NativeWindowWrapper
// and posts it; the wrapper is unpacked in kWhatSetVideoNativeWindow.
void NuPlayer::setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id());
    sp<SurfaceTextureClient> surfaceTextureClient(surfaceTexture != NULL ?
                new SurfaceTextureClient(surfaceTexture) : NULL);
    msg->setObject("native-window", new NativeWindowWrapper(surfaceTextureClient));
    msg->post();
}

void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) {
    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id());
    msg->setObject("sink", sink);
    msg->post();
}

void NuPlayer::start() {
    (new AMessage(kWhatStart, id()))->post();
}

void NuPlayer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

// Asynchronous: completion is reported via NuPlayerDriver::notifyResetComplete
// once finishReset() runs (possibly after a pending flush finishes).
void NuPlayer::resetAsync() {
    (new AMessage(kWhatReset, id()))->post();
}

// Asynchronous: completion is reported via notifySeekComplete from kWhatSeek.
void NuPlayer::seekToAsync(int64_t seekTimeUs) {
    sp<AMessage> msg = new AMessage(kWhatSeek, id());
    msg->setInt64("seekTimeUs", seekTimeUs);
    msg->post();
}

// static
// Returns true iff 'state' is one of the two active-flush states.  When it
// returns true and needShutdown is non-NULL, *needShutdown tells the caller
// whether the decoder must also be shut down after the flush completes.
bool NuPlayer::IsFlushingState(FlushStatus state, bool *needShutdown) {
    switch (state) {
        case FLUSHING_DECODER:
            if (needShutdown != NULL) {
                *needShutdown = false;
            }
            return true;

        case FLUSHING_DECODER_SHUTDOWN:
            if (needShutdown != NULL) {
                *needShutdown = true;
            }
            return true;

        default:
            return false;
    }
}

// Central dispatch; runs on the looper thread.  All state transitions of the
// flush/shutdown/reset machinery happen here or in helpers called from here.
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetDataSource:
        {
            ALOGV("kWhatSetDataSource");

            // A source may only be set once per player instance.
            CHECK(mSource == NULL);

            sp<RefBase> obj;
            CHECK(msg->findObject("source", &obj));

            mSource = static_cast<Source *>(obj.get());
            break;
        }

        case kWhatSetVideoNativeWindow:
        {
            ALOGV("kWhatSetVideoNativeWindow");

            sp<RefBase> obj;
            CHECK(msg->findObject("native-window", &obj));

            mNativeWindow = static_cast<NativeWindowWrapper *>(obj.get());
            break;
        }

        case kWhatSetAudioSink:
        {
            ALOGV("kWhatSetAudioSink");

            sp<RefBase> obj;
            CHECK(msg->findObject("sink", &obj));

            mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get());
            break;
        }

        case kWhatStart:
        {
            ALOGV("kWhatStart");

            // Reset per-playback state, start the source and create a fresh
            // renderer, then begin polling for decodable tracks.
            mVideoIsAVC = false;
            mAudioEOS = false;
            mVideoEOS = false;
            mSkipRenderingAudioUntilMediaTimeUs = -1;
            mSkipRenderingVideoUntilMediaTimeUs = -1;
            mVideoLateByUs = 0;
            mNumFramesTotal = 0;
            mNumFramesDropped = 0;

            mSource->start();

            mRenderer = new Renderer(
                    mAudioSink,
                    new AMessage(kWhatRendererNotify, id()));

            looper()->registerHandler(mRenderer);

            postScanSources();
            break;
        }

        case kWhatScanSources:
        {
            // The generation counter invalidates scan messages queued before
            // a flush or reset bumped mScanSourcesGeneration.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mScanSourcesGeneration) {
                // Drop obsolete msg.
                break;
            }

            mScanSourcesPending = false;

            ALOGV("scanning sources haveAudio=%d, haveVideo=%d",
                 mAudioDecoder != NULL, mVideoDecoder != NULL);

            instantiateDecoder(false, &mVideoDecoder);

            if (mAudioSink != NULL) {
                instantiateDecoder(true, &mAudioDecoder);
            }

            status_t err;
            if ((err = mSource->feedMoreTSData()) != OK) {
                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                    // We're not currently decoding anything (no audio or
                    // video tracks found) and we just ran out of input data.

                    if (err == ERROR_END_OF_STREAM) {
                        notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                    } else {
                        notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
                    }
                }
                break;
            }

            // Some track formats may not be known yet; retry in 100ms until
            // both decoders exist (or input runs out above).
            if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
                msg->post(100000ll);
                mScanSourcesPending = true;
            }
            break;
        }

        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            // Notification from one of the two decoders; 'audio' selects
            // which stream's state we operate on throughout.
            bool audio = msg->what() == kWhatAudioNotify;

            sp<AMessage> codecRequest;
            CHECK(msg->findMessage("codec-request", &codecRequest));

            int32_t what;
            CHECK(codecRequest->findInt32("what", &what));

            if (what == ACodec::kWhatFillThisBuffer) {
                status_t err = feedDecoderInputData(
                        audio, codecRequest);

                if (err == -EWOULDBLOCK) {
                    // No access unit available yet; retry in 10ms as long as
                    // the source still accepts more input data.
                    if (mSource->feedMoreTSData() == OK) {
                        msg->post(10000ll);
                    }
                }
            } else if (what == ACodec::kWhatEOS) {
                int32_t err;
                CHECK(codecRequest->findInt32("err", &err));

                if (err == ERROR_END_OF_STREAM) {
                    ALOGV("got %s decoder EOS", audio ? "audio" : "video");
                } else {
                    ALOGV("got %s decoder EOS w/ error %d",
                         audio ? "audio" : "video",
                         err);
                }

                // Let the renderer drain queued buffers before reporting EOS.
                mRenderer->queueEOS(audio, err);
            } else if (what == ACodec::kWhatFlushCompleted) {
                bool needShutdown;

                if (audio) {
                    CHECK(IsFlushingState(mFlushingAudio, &needShutdown));
                    mFlushingAudio = FLUSHED;
                } else {
                    CHECK(IsFlushingState(mFlushingVideo, &needShutdown));
                    mFlushingVideo = FLUSHED;

                    mVideoLateByUs = 0;
                }

                ALOGV("decoder %s flush completed", audio ? "audio" : "video");

                if (needShutdown) {
                    // FLUSHING_DECODER_SHUTDOWN: chain a shutdown after the
                    // flush; completion arrives as kWhatShutdownCompleted.
                    ALOGV("initiating %s decoder shutdown",
                         audio ? "audio" : "video");

                    (audio ? mAudioDecoder : mVideoDecoder)->initiateShutdown();

                    if (audio) {
                        mFlushingAudio = SHUTTING_DOWN_DECODER;
                    } else {
                        mFlushingVideo = SHUTTING_DOWN_DECODER;
                    }
                }

                finishFlushIfPossible();
            } else if (what == ACodec::kWhatOutputFormatChanged) {
                if (audio) {
                    int32_t numChannels;
                    CHECK(codecRequest->findInt32("channel-count", &numChannels));

                    int32_t sampleRate;
                    CHECK(codecRequest->findInt32("sample-rate", &sampleRate));

                    ALOGV("Audio output format changed to %d Hz, %d channels",
                         sampleRate, numChannels);

                    // Reopen the sink with the new format.
                    mAudioSink->close();

                    audio_output_flags_t flags;
                    int64_t durationUs;
                    // FIXME: we should handle the case where the video decoder is created after
                    // we receive the format change indication. Current code will just make that
                    // we select deep buffer with video which should not be a problem as it should
                    // not prevent from keeping A/V sync.
                    if (mVideoDecoder == NULL &&
                            mSource->getDuration(&durationUs) == OK &&
                            durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
                        flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
                    } else {
                        flags = AUDIO_OUTPUT_FLAG_NONE;
                    }

                    CHECK_EQ(mAudioSink->open(
                                sampleRate,
                                numChannels,
                                CHANNEL_MASK_USE_CHANNEL_ORDER,
                                AUDIO_FORMAT_PCM_16_BIT,
                                8 /* bufferCount */,
                                NULL,
                                NULL,
                                flags),
                             (status_t)OK);
                    mAudioSink->start();

                    mRenderer->signalAudioSinkChanged();
                } else {
                    // video

                    int32_t width, height;
                    CHECK(codecRequest->findInt32("width", &width));
                    CHECK(codecRequest->findInt32("height", &height));

                    int32_t cropLeft, cropTop, cropRight, cropBottom;
                    CHECK(codecRequest->findRect(
                                "crop",
                                &cropLeft, &cropTop, &cropRight, &cropBottom));

                    ALOGV("Video output format changed to %d x %d "
                         "(crop: %d x %d @ (%d, %d))",
                         width, height,
                         (cropRight - cropLeft + 1),
                         (cropBottom - cropTop + 1),
                         cropLeft, cropTop);

                    // Report the cropped (visible) size, not the coded size.
                    notifyListener(
                            MEDIA_SET_VIDEO_SIZE,
                            cropRight - cropLeft + 1,
                            cropBottom - cropTop + 1);
                }
            } else if (what == ACodec::kWhatShutdownCompleted) {
                ALOGV("%s shutdown completed", audio ? "audio" : "video");
                if (audio) {
                    mAudioDecoder.clear();

                    CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
                    mFlushingAudio = SHUT_DOWN;
                } else {
                    mVideoDecoder.clear();

                    CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
                    mFlushingVideo = SHUT_DOWN;
                }

                finishFlushIfPossible();
            } else if (what == ACodec::kWhatError) {
                ALOGE("Received error from %s decoder, aborting playback.",
                     audio ? "audio" : "video");

                // Route the failure through the renderer as an EOS-with-error
                // so the normal completion path reports it.
                mRenderer->queueEOS(audio, UNKNOWN_ERROR);
            } else if (what == ACodec::kWhatDrainThisBuffer) {
                renderBuffer(audio, codecRequest);
            } else {
                ALOGV("Unhandled codec notification %d.", what);
            }

            break;
        }

        case kWhatRendererNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (what == Renderer::kWhatEOS) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                int32_t finalResult;
                CHECK(msg->findInt32("finalResult", &finalResult));

                if (audio) {
                    mAudioEOS = true;
                } else {
                    mVideoEOS = true;
                }

                if (finalResult == ERROR_END_OF_STREAM) {
                    ALOGV("reached %s EOS", audio ? "audio" : "video");
                } else {
                    ALOGE("%s track encountered an error (%d)",
                         audio ? "audio" : "video", finalResult);

                    notifyListener(
                            MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, finalResult);
                }

                // Playback is complete once every stream that has a decoder
                // has reached EOS.
                if ((mAudioEOS || mAudioDecoder == NULL)
                        && (mVideoEOS || mVideoDecoder == NULL)) {
                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                }
            } else if (what == Renderer::kWhatPosition) {
                int64_t positionUs;
                CHECK(msg->findInt64("positionUs", &positionUs));

                // Renderer also reports how late video is; used to decide
                // whether to drop non-reference AVC frames on input.
                CHECK(msg->findInt64("videoLateByUs", &mVideoLateByUs));

                if (mDriver != NULL) {
                    sp<NuPlayerDriver> driver = mDriver.promote();
                    if (driver != NULL) {
                        driver->notifyPosition(positionUs);

                        driver->notifyFrameStats(
                                mNumFramesTotal, mNumFramesDropped);
                    }
                }
            } else if (what == Renderer::kWhatFlushComplete) {
                CHECK_EQ(what, (int32_t)Renderer::kWhatFlushComplete);

                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
            }
            break;
        }

        case kWhatMoreDataQueued:
        {
            break;
        }

        case kWhatReset:
        {
            ALOGV("kWhatReset");

            if (mRenderer != NULL) {
                // There's an edge case where the renderer owns all output
                // buffers and is paused, therefore the decoder will not read
                // more input data and will never encounter the matching
                // discontinuity. To avoid this, we resume the renderer.

                if (mFlushingAudio == AWAITING_DISCONTINUITY
                        || mFlushingVideo == AWAITING_DISCONTINUITY) {
                    mRenderer->resume();
                }
            }

            if (mFlushingAudio != NONE || mFlushingVideo != NONE) {
                // We're currently flushing, postpone the reset until that's
                // completed.

                ALOGV("postponing reset mFlushingAudio=%d, mFlushingVideo=%d",
                     mFlushingAudio, mFlushingVideo);

                mResetPostponed = true;
                break;
            }

            if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                // Nothing to flush or shut down; reset immediately.
                finishReset();
                break;
            }

            mTimeDiscontinuityPending = true;

            // Flush-with-shutdown both decoders; finishFlushIfPossible() will
            // call finishReset() once both reach SHUT_DOWN.
            if (mAudioDecoder != NULL) {
                flushDecoder(true /* audio */, true /* needShutdown */);
            }

            if (mVideoDecoder != NULL) {
                flushDecoder(false /* audio */, true /* needShutdown */);
            }

            mResetInProgress = true;
            break;
        }

        case kWhatSeek:
        {
            int64_t seekTimeUs;
            CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));

            ALOGV("kWhatSeek seekTimeUs=%lld us (%.2f secs)",
                 seekTimeUs, seekTimeUs / 1E6);

            // The source injects a discontinuity into the stream; decoder
            // flushing is triggered from feedDecoderInputData when it is seen.
            mSource->seekTo(seekTimeUs);

            if (mDriver != NULL) {
                sp<NuPlayerDriver> driver = mDriver.promote();
                if (driver != NULL) {
                    driver->notifySeekComplete();
                }
            }

            break;
        }

        case kWhatPause:
        {
            CHECK(mRenderer != NULL);
            mRenderer->pause();
            break;
        }

        case kWhatResume:
        {
            CHECK(mRenderer != NULL);
            mRenderer->resume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

// Called after every flush/shutdown state change.  Once BOTH streams are in a
// terminal state (FLUSHED or SHUT_DOWN) this either completes a pending reset,
// replays a postponed one, or resumes the decoders and rescans for any track
// whose decoder is still missing.
void NuPlayer::finishFlushIfPossible() {
    if (mFlushingAudio != FLUSHED && mFlushingAudio != SHUT_DOWN) {
        return;
    }

    if (mFlushingVideo != FLUSHED && mFlushingVideo != SHUT_DOWN) {
        return;
    }

    ALOGV("both audio and video are flushed now.");

    if (mTimeDiscontinuityPending) {
        mRenderer->signalTimeDiscontinuity();
        mTimeDiscontinuityPending = false;
    }

    if (mAudioDecoder != NULL) {
        mAudioDecoder->signalResume();
    }

    if (mVideoDecoder != NULL) {
        mVideoDecoder->signalResume();
    }

    mFlushingAudio = NONE;
    mFlushingVideo = NONE;

    if (mResetInProgress) {
        ALOGV("reset completed");

        mResetInProgress = false;
        finishReset();
    } else if (mResetPostponed) {
        (new AMessage(kWhatReset, id()))->post();
        mResetPostponed = false;
    } else if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
        postScanSources();
    }
}

// Tears down renderer and source and notifies the driver.  Both decoders must
// already have been shut down (enforced by the CHECKs).
void NuPlayer::finishReset() {
    CHECK(mAudioDecoder == NULL);
    CHECK(mVideoDecoder == NULL);

    // Invalidate any in-flight kWhatScanSources messages.
    ++mScanSourcesGeneration;
    mScanSourcesPending = false;

    mRenderer.clear();

    if (mSource != NULL) {
        mSource->stop();
        mSource.clear();
    }

    if (mDriver != NULL) {
        sp<NuPlayerDriver> driver = mDriver.promote();
        if (driver != NULL) {
            driver->notifyResetComplete();
        }
    }
}

// Posts a kWhatScanSources message, de-duplicated via mScanSourcesPending so
// at most one scan is ever queued.
void NuPlayer::postScanSources() {
    if (mScanSourcesPending) {
        return;
    }

    sp<AMessage> msg = new AMessage(kWhatScanSources, id());
    msg->setInt32("generation", mScanSourcesGeneration);
    msg->post();

    mScanSourcesPending = true;
}

// Creates and configures the audio or video decoder if the source can already
// provide its format.  Returns OK if the decoder exists (or was just created),
// -EWOULDBLOCK if the format isn't available yet (caller retries via scan).
status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
    if (*decoder != NULL) {
        return OK;
    }

    sp<MetaData> meta = mSource->getFormat(audio);

    if (meta == NULL) {
        return -EWOULDBLOCK;
    }

    if (!audio) {
        // Remember whether video is AVC so late non-reference frames can be
        // dropped in feedDecoderInputData().
        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));
        mVideoIsAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime);
    }

    sp<AMessage> notify =
        new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
                     id());

    *decoder = audio ? new Decoder(notify) :
                       new Decoder(notify, mNativeWindow);
    looper()->registerHandler(*decoder);

    (*decoder)->configure(meta);

    // Duration becomes known once a format is available; pass it on to the
    // driver for getDuration().
    int64_t durationUs;
    if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
        sp<NuPlayerDriver> driver = mDriver.promote();
        if (driver != NULL) {
            driver->notifyDuration(durationUs);
        }
    }

    return OK;
}

// Services a decoder's kWhatFillThisBuffer request: dequeues one access unit
// from the source and posts it back on 'reply'.  Handles stream
// discontinuities (seek/format change) by starting decoder flushes, and drops
// late non-reference AVC video frames.  Returns -EWOULDBLOCK when no data is
// available yet; any other source error is forwarded to the decoder via the
// reply's "err" field.
status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    if ((audio && IsFlushingState(mFlushingAudio))
            || (!audio && IsFlushingState(mFlushingVideo))) {
        // Mid-flush: bounce the request back with INFO_DISCONTINUITY instead
        // of feeding more data.
        reply->setInt32("err", INFO_DISCONTINUITY);
        reply->post();
        return OK;
    }

    sp<ABuffer> accessUnit;

    bool dropAccessUnit;
    do {
        status_t err = mSource->dequeueAccessUnit(audio, &accessUnit);

        if (err == -EWOULDBLOCK) {
            return err;
        } else if (err != OK) {
            if (err == INFO_DISCONTINUITY) {
                int32_t type;
                CHECK(accessUnit->meta()->findInt32("discontinuity", &type));

                bool formatChange =
                    (audio &&
                     (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT))
                    || (!audio &&
                         (type & ATSParser::DISCONTINUITY_VIDEO_FORMAT));

                bool timeChange = (type & ATSParser::DISCONTINUITY_TIME) != 0;

                ALOGI("%s discontinuity (formatChange=%d, time=%d)",
                     audio ? "audio" : "video", formatChange, timeChange);

                if (audio) {
                    mSkipRenderingAudioUntilMediaTimeUs = -1;
                } else {
                    mSkipRenderingVideoUntilMediaTimeUs = -1;
                }

                if (timeChange) {
                    // The source may ask us to suppress rendering until a
                    // given media time after the discontinuity.
                    sp<AMessage> extra;
                    if (accessUnit->meta()->findMessage("extra", &extra)
                            && extra != NULL) {
                        int64_t resumeAtMediaTimeUs;
                        if (extra->findInt64(
                                    "resume-at-mediatimeUs", &resumeAtMediaTimeUs)) {
                            ALOGI("suppressing rendering of %s until %lld us",
                                    audio ? "audio" : "video", resumeAtMediaTimeUs);

                            if (audio) {
                                mSkipRenderingAudioUntilMediaTimeUs =
                                    resumeAtMediaTimeUs;
                            } else {
                                mSkipRenderingVideoUntilMediaTimeUs =
                                    resumeAtMediaTimeUs;
                            }
                        }
                    }
                }

                mTimeDiscontinuityPending =
                    mTimeDiscontinuityPending || timeChange;

                if (formatChange || timeChange) {
                    // Only shut the decoder down on a format change; a pure
                    // time change just needs a flush.
                    flushDecoder(audio, formatChange);
                } else {
                    // This stream is unaffected by the discontinuity

                    if (audio) {
                        mFlushingAudio = FLUSHED;
                    } else {
                        mFlushingVideo = FLUSHED;
                    }

                    finishFlushIfPossible();

                    return -EWOULDBLOCK;
                }
            }

            reply->setInt32("err", err);
            reply->post();
            return OK;
        }

        if (!audio) {
            ++mNumFramesTotal;
        }

        // When video is running more than 100ms late, drop AVC frames that no
        // other frame references, to help catch up.
        dropAccessUnit = false;
        if (!audio
                && mVideoLateByUs > 100000ll
                && mVideoIsAVC
                && !IsAVCReferenceFrame(accessUnit)) {
            dropAccessUnit = true;
            ++mNumFramesDropped;
        }
    } while (dropAccessUnit);

    // ALOGV("returned a valid buffer of %s data", audio ? "audio" : "video");

#if 0
    int64_t mediaTimeUs;
    CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
    ALOGV("feeding %s input buffer at media time %.2f secs",
         audio ? "audio" : "video",
         mediaTimeUs / 1E6);
#endif

    reply->setBuffer("buffer", accessUnit);
    reply->post();

    return OK;
}

// Services a decoder's kWhatDrainThisBuffer request: hands the output buffer
// to the renderer, unless we're flushing or inside a skip-until window, in
// which case the buffer is returned to the decoder immediately.
void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
    // ALOGV("renderBuffer %s", audio ? "audio" : "video");

    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    if (IsFlushingState(audio ? mFlushingAudio : mFlushingVideo)) {
        // We're currently attempting to flush the decoder, in order
        // to complete this, the decoder wants all its buffers back,
        // so we don't want any output buffers it sent us (from before
        // we initiated the flush) to be stuck in the renderer's queue.

        ALOGV("we're still flushing the %s decoder, sending its output buffer"
             " right back.", audio ? "audio" : "video");

        reply->post();
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    // Reference so clearing the window below writes through to the member.
    int64_t &skipUntilMediaTimeUs =
        audio
            ? mSkipRenderingAudioUntilMediaTimeUs
            : mSkipRenderingVideoUntilMediaTimeUs;

    if (skipUntilMediaTimeUs >= 0) {
        int64_t mediaTimeUs;
        CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mediaTimeUs < skipUntilMediaTimeUs) {
            ALOGV("dropping %s buffer at time %lld as requested.",
                 audio ? "audio" : "video",
                 mediaTimeUs);

            reply->post();
            return;
        }

        skipUntilMediaTimeUs = -1;
    }

    mRenderer->queueBuffer(audio, buffer, reply);
}

// Forwards a player event to the driver if it is still alive.
void NuPlayer::notifyListener(int msg, int ext1, int ext2) {
    if (mDriver == NULL) {
        return;
    }

    sp<NuPlayerDriver> driver = mDriver.promote();

    if (driver == NULL) {
        return;
    }

    driver->notifyListener(msg, ext1, ext2);
}

// Kicks off a flush of one decoder (and the renderer's matching queue) and
// sets up the flush state machine.  The stream NOT being flushed is parked in
// AWAITING_DISCONTINUITY (if it has a decoder) or FLUSHED, so that
// finishFlushIfPossible() waits for the right set of completions.
// NOTE(review): if the decoder is absent this logs a warning but still
// dereferences it in signalFlush() below — presumably callers guarantee the
// decoder exists; confirm.
void NuPlayer::flushDecoder(bool audio, bool needShutdown) {
    if ((audio && mAudioDecoder == NULL) || (!audio && mVideoDecoder == NULL)) {
        ALOGI("flushDecoder %s without decoder present",
             audio ? "audio" : "video");
    }

    // Make sure we don't continue to scan sources until we finish flushing.
    ++mScanSourcesGeneration;
    mScanSourcesPending = false;

    (audio ? mAudioDecoder : mVideoDecoder)->signalFlush();
    mRenderer->flush(audio);

    FlushStatus newStatus =
        needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER;

    if (audio) {
        CHECK(mFlushingAudio == NONE
                || mFlushingAudio == AWAITING_DISCONTINUITY);

        mFlushingAudio = newStatus;

        if (mFlushingVideo == NONE) {
            mFlushingVideo = (mVideoDecoder != NULL)
                ? AWAITING_DISCONTINUITY
                : FLUSHED;
        }
    } else {
        CHECK(mFlushingVideo == NONE
                || mFlushingVideo == AWAITING_DISCONTINUITY);

        mFlushingVideo = newStatus;

        if (mFlushingAudio == NONE) {
            mFlushingAudio = (mAudioDecoder != NULL)
                ? AWAITING_DISCONTINUITY
                : FLUSHED;
        }
    }
}

}  // namespace android