// NuPlayer.cpp — revision 9806555d3930be43e11106281dee354820ac1c88
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayer"
#include <utils/Log.h>

#include "NuPlayer.h"

#include "HTTPLiveSource.h"
#include "NuPlayerDecoder.h"
#include "NuPlayerDriver.h"
#include "NuPlayerRenderer.h"
#include "NuPlayerSource.h"
#include "RTSPSource.h"
#include "StreamingSource.h"
#include "GenericSource.h"

#include "ATSParser.h"

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <gui/ISurfaceTexture.h>

#include "avc_utils.h"

namespace android {

////////////////////////////////////////////////////////////////////////////////

// NuPlayer is an AHandler: all mutable state below is owned by the looper
// thread, and the public entry points merely post messages to it.
NuPlayer::NuPlayer()
    : mUIDValid(false),
      mVideoIsAVC(false),
      mAudioEOS(false),
      mVideoEOS(false),
      mScanSourcesPending(false),
      mScanSourcesGeneration(0),
      mTimeDiscontinuityPending(false),
      mFlushingAudio(NONE),
      mFlushingVideo(NONE),
      mResetInProgress(false),
      mResetPostponed(false),
      // -1 means "no suppression": see renderBuffer().
      mSkipRenderingAudioUntilMediaTimeUs(-1ll),
      mSkipRenderingVideoUntilMediaTimeUs(-1ll),
      mVideoLateByUs(0ll),
      mNumFramesTotal(0ll),
      mNumFramesDropped(0ll) {
}

NuPlayer::~NuPlayer() {
}

// Records the client uid so sources created later (HTTP live / RTSP) can
// attribute their network traffic.
void NuPlayer::setUID(uid_t uid) {
    mUIDValid = true;
    mUID = uid;
}

// The driver is held weakly; it is promoted before every notification.
void NuPlayer::setDriver(const wp<NuPlayerDriver> &driver) {
    mDriver = driver;
}

// Adopts a push-style IStreamSource (e.g. a transport stream fed by the
// client) as the data source.  Runs asynchronously on the looper thread.
void NuPlayer::setDataSource(const sp<IStreamSource> &source) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    msg->setObject("source", new StreamingSource(source));
    msg->post();
}

// Heuristic: an http(s) URL is treated as HTTP-Live if it ends in ".m3u8"
// or merely contains "m3u8" anywhere in it (the latter catches playlist
// URLs with query strings, at the cost of occasional false positives).
static bool IsHTTPLiveURL(const char *url) {
    if (!strncasecmp("http://", url, 7)
            || !strncasecmp("https://", url, 8)) {
        size_t len = strlen(url);
        if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
            return true;
        }

        if (strstr(url,"m3u8")) {
            return true;
        }
    }

    return false;
}

// Picks a Source implementation based on the URL scheme: HTTP-Live, RTSP,
// or the generic (stagefright extractor based) source for everything else.
void NuPlayer::setDataSource(
        const char *url, const KeyedVector<String8, String8> *headers) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    sp<Source> source;
    if (IsHTTPLiveURL(url)) {
        source = new HTTPLiveSource(url, headers, mUIDValid, mUID);
    } else if (!strncasecmp(url, "rtsp://", 7)) {
        source = new RTSPSource(url, headers, mUIDValid, mUID);
    } else {
        source = new GenericSource(url, headers, mUIDValid, mUID);
    }

    msg->setObject("source", source);
    msg->post();
}

// File-descriptor based data source (local playback).
void NuPlayer::setDataSource(int fd, int64_t offset, int64_t length) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    sp<Source> source = new GenericSource(fd, offset, length);
    msg->setObject("source", source);
    msg->post();
}

// Wraps the SurfaceTexture in an ANativeWindow; a NULL texture yields a
// wrapper around NULL, i.e. video output is detached.
void NuPlayer::setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id());
    sp<SurfaceTextureClient> surfaceTextureClient(surfaceTexture != NULL ?
                new SurfaceTextureClient(surfaceTexture) : NULL);
    msg->setObject("native-window", new NativeWindowWrapper(surfaceTextureClient));
    msg->post();
}

void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) {
    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id());
    msg->setObject("sink", sink);
    msg->post();
}

void NuPlayer::start() {
    (new AMessage(kWhatStart, id()))->post();
}

void NuPlayer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

// Asynchronous teardown; completion is reported via notifyResetComplete().
void NuPlayer::resetAsync() {
    (new AMessage(kWhatReset, id()))->post();
}

// Asynchronous seek; completion is reported via notifySeekComplete().
void NuPlayer::seekToAsync(int64_t seekTimeUs) {
    sp<AMessage> msg = new AMessage(kWhatSeek, id());
    msg->setInt64("seekTimeUs", seekTimeUs);
    msg->post();
}

// static
// Returns true if |state| denotes an in-progress decoder flush.  If
// |needShutdown| is non-NULL it is set to whether the decoder must also be
// shut down once the flush completes.
bool NuPlayer::IsFlushingState(FlushStatus state, bool *needShutdown) {
    switch (state) {
        case FLUSHING_DECODER:
            if (needShutdown != NULL) {
                *needShutdown = false;
            }
            return true;

        case FLUSHING_DECODER_SHUTDOWN:
            if (needShutdown != NULL) {
                *needShutdown = true;
            }
            return true;

        default:
            return false;
    }
}

// Central message dispatch; every public API call above lands here on the
// looper thread, as do notifications from the decoders and the renderer.
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetDataSource:
        {
            ALOGV("kWhatSetDataSource");

            CHECK(mSource == NULL);

            sp<RefBase> obj;
            CHECK(msg->findObject("source", &obj));

            mSource = static_cast<Source *>(obj.get());
            break;
        }

        case kWhatSetVideoNativeWindow:
        {
            ALOGV("kWhatSetVideoNativeWindow");

            sp<RefBase> obj;
            CHECK(msg->findObject("native-window", &obj));

            mNativeWindow = static_cast<NativeWindowWrapper *>(obj.get());
            break;
        }

        case kWhatSetAudioSink:
        {
            ALOGV("kWhatSetAudioSink");

            sp<RefBase> obj;
            CHECK(msg->findObject("sink", &obj));

            mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get());
            break;
        }

        case kWhatStart:
        {
            ALOGV("kWhatStart");

            // Reset per-playback state, start the source, create the
            // renderer and kick off decoder instantiation.
            mVideoIsAVC = false;
            mAudioEOS = false;
            mVideoEOS = false;
            mSkipRenderingAudioUntilMediaTimeUs = -1;
            mSkipRenderingVideoUntilMediaTimeUs = -1;
            mVideoLateByUs = 0;
            mNumFramesTotal = 0;
            mNumFramesDropped = 0;

            mSource->start();

            mRenderer = new Renderer(
                    mAudioSink,
                    new AMessage(kWhatRendererNotify, id()));

            looper()->registerHandler(mRenderer);

            postScanSources();
            break;
        }

        case kWhatScanSources:
        {
            // Stale scan messages (from before a flush/reset bumped the
            // generation) are dropped.
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mScanSourcesGeneration) {
                // Drop obsolete msg.
                break;
            }

            mScanSourcesPending = false;

            ALOGV("scanning sources haveAudio=%d, haveVideo=%d",
                 mAudioDecoder != NULL, mVideoDecoder != NULL);

            instantiateDecoder(false, &mVideoDecoder);

            if (mAudioSink != NULL) {
                instantiateDecoder(true, &mAudioDecoder);
            }

            status_t err;
            if ((err = mSource->feedMoreTSData()) != OK) {
                if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                    // We're not currently decoding anything (no audio or
                    // video tracks found) and we just ran out of input data.

                    if (err == ERROR_END_OF_STREAM) {
                        notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                    } else {
                        notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err);
                    }
                }
                break;
            }

            // Keep rescanning every 100ms until both decoders exist (a
            // track's format may only become known after more data is fed).
            if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
                msg->post(100000ll);
                mScanSourcesPending = true;
            }
            break;
        }

        case kWhatVideoNotify:
        case kWhatAudioNotify:
        {
            // Notification from one of the two decoders; which one is
            // encoded in the message's "what".
            bool audio = msg->what() == kWhatAudioNotify;

            sp<AMessage> codecRequest;
            CHECK(msg->findMessage("codec-request", &codecRequest));

            int32_t what;
            CHECK(codecRequest->findInt32("what", &what));

            if (what == ACodec::kWhatFillThisBuffer) {
                status_t err = feedDecoderInputData(
                        audio, codecRequest);

                if (err == -EWOULDBLOCK) {
                    // No access unit available yet; retry in 10ms as long
                    // as the source still accepts data.
                    if (mSource->feedMoreTSData() == OK) {
                        msg->post(10000ll);
                    }
                }
            } else if (what == ACodec::kWhatEOS) {
                int32_t err;
                CHECK(codecRequest->findInt32("err", &err));

                if (err == ERROR_END_OF_STREAM) {
                    ALOGV("got %s decoder EOS", audio ? "audio" : "video");
                } else {
                    ALOGV("got %s decoder EOS w/ error %d",
                         audio ? "audio" : "video",
                         err);
                }

                mRenderer->queueEOS(audio, err);
            } else if (what == ACodec::kWhatFlushCompleted) {
                bool needShutdown;

                if (audio) {
                    CHECK(IsFlushingState(mFlushingAudio, &needShutdown));
                    mFlushingAudio = FLUSHED;
                } else {
                    CHECK(IsFlushingState(mFlushingVideo, &needShutdown));
                    mFlushingVideo = FLUSHED;

                    mVideoLateByUs = 0;
                }

                ALOGV("decoder %s flush completed", audio ? "audio" : "video");

                if (needShutdown) {
                    // Flush was part of a shutdown (format change or reset);
                    // continue with decoder teardown.
                    ALOGV("initiating %s decoder shutdown",
                         audio ? "audio" : "video");

                    (audio ? mAudioDecoder : mVideoDecoder)->initiateShutdown();

                    if (audio) {
                        mFlushingAudio = SHUTTING_DOWN_DECODER;
                    } else {
                        mFlushingVideo = SHUTTING_DOWN_DECODER;
                    }
                }

                finishFlushIfPossible();
            } else if (what == ACodec::kWhatOutputFormatChanged) {
                if (audio) {
                    int32_t numChannels;
                    CHECK(codecRequest->findInt32("channel-count", &numChannels));

                    int32_t sampleRate;
                    CHECK(codecRequest->findInt32("sample-rate", &sampleRate));

                    ALOGV("Audio output format changed to %d Hz, %d channels",
                         sampleRate, numChannels);

                    // Reopen the sink with the new format.
                    mAudioSink->close();

                    audio_output_flags_t flags;
                    int64_t durationUs;
                    // FIXME: we should handle the case where the video decoder is created after
                    // we receive the format change indication. Current code will just make that
                    // we select deep buffer with video which should not be a problem as it should
                    // not prevent from keeping A/V sync.
                    if (mVideoDecoder == NULL &&
                            mSource->getDuration(&durationUs) == OK &&
                            durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) {
                        flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
                    } else {
                        flags = AUDIO_OUTPUT_FLAG_NONE;
                    }

                    int32_t channelMask;
                    if (!codecRequest->findInt32("channel-mask", &channelMask)) {
                        channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
                    }

                    CHECK_EQ(mAudioSink->open(
                                sampleRate,
                                numChannels,
                                (audio_channel_mask_t)channelMask,
                                AUDIO_FORMAT_PCM_16_BIT,
                                8 /* bufferCount */,
                                NULL,
                                NULL,
                                flags),
                             (status_t)OK);
                    mAudioSink->start();

                    mRenderer->signalAudioSinkChanged();
                } else {
                    // video

                    int32_t width, height;
                    CHECK(codecRequest->findInt32("width", &width));
                    CHECK(codecRequest->findInt32("height", &height));

                    int32_t cropLeft, cropTop, cropRight, cropBottom;
                    CHECK(codecRequest->findRect(
                                "crop",
                                &cropLeft, &cropTop, &cropRight, &cropBottom));

                    ALOGV("Video output format changed to %d x %d "
                         "(crop: %d x %d @ (%d, %d))",
                         width, height,
                         (cropRight - cropLeft + 1),
                         (cropBottom - cropTop + 1),
                         cropLeft, cropTop);

                    // Report the cropped (visible) size, not the coded size.
                    notifyListener(
                            MEDIA_SET_VIDEO_SIZE,
                            cropRight - cropLeft + 1,
                            cropBottom - cropTop + 1);
                }
            } else if (what == ACodec::kWhatShutdownCompleted) {
                ALOGV("%s shutdown completed", audio ? "audio" : "video");
                if (audio) {
                    mAudioDecoder.clear();

                    CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER);
                    mFlushingAudio = SHUT_DOWN;
                } else {
                    mVideoDecoder.clear();

                    CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER);
                    mFlushingVideo = SHUT_DOWN;
                }

                finishFlushIfPossible();
            } else if (what == ACodec::kWhatError) {
                ALOGE("Received error from %s decoder, aborting playback.",
                     audio ? "audio" : "video");

                // Funnel the error through the renderer so the normal EOS
                // path reports it to the listener.
                mRenderer->queueEOS(audio, UNKNOWN_ERROR);
            } else if (what == ACodec::kWhatDrainThisBuffer) {
                renderBuffer(audio, codecRequest);
            } else {
                ALOGV("Unhandled codec notification %d.", what);
            }

            break;
        }

        case kWhatRendererNotify:
        {
            int32_t what;
            CHECK(msg->findInt32("what", &what));

            if (what == Renderer::kWhatEOS) {
                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                int32_t finalResult;
                CHECK(msg->findInt32("finalResult", &finalResult));

                if (audio) {
                    mAudioEOS = true;
                } else {
                    mVideoEOS = true;
                }

                if (finalResult == ERROR_END_OF_STREAM) {
                    ALOGV("reached %s EOS", audio ? "audio" : "video");
                } else {
                    ALOGE("%s track encountered an error (%d)",
                         audio ? "audio" : "video", finalResult);

                    notifyListener(
                            MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, finalResult);
                }

                // Playback is complete once every existing track has
                // reached EOS.
                if ((mAudioEOS || mAudioDecoder == NULL)
                        && (mVideoEOS || mVideoDecoder == NULL)) {
                    notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0);
                }
            } else if (what == Renderer::kWhatPosition) {
                int64_t positionUs;
                CHECK(msg->findInt64("positionUs", &positionUs));

                CHECK(msg->findInt64("videoLateByUs", &mVideoLateByUs));

                if (mDriver != NULL) {
                    sp<NuPlayerDriver> driver = mDriver.promote();
                    if (driver != NULL) {
                        driver->notifyPosition(positionUs);

                        driver->notifyFrameStats(
                                mNumFramesTotal, mNumFramesDropped);
                    }
                }
            } else if (what == Renderer::kWhatFlushComplete) {
                // NOTE(review): this CHECK_EQ is a tautology given the
                // enclosing condition; it is kept as-is.
                CHECK_EQ(what, (int32_t)Renderer::kWhatFlushComplete);

                int32_t audio;
                CHECK(msg->findInt32("audio", &audio));

                ALOGV("renderer %s flush completed.", audio ? "audio" : "video");
            }
            break;
        }

        case kWhatMoreDataQueued:
        {
            break;
        }

        case kWhatReset:
        {
            ALOGV("kWhatReset");

            if (mRenderer != NULL) {
                // There's an edge case where the renderer owns all output
                // buffers and is paused, therefore the decoder will not read
                // more input data and will never encounter the matching
                // discontinuity. To avoid this, we resume the renderer.

                if (mFlushingAudio == AWAITING_DISCONTINUITY
                        || mFlushingVideo == AWAITING_DISCONTINUITY) {
                    mRenderer->resume();
                }
            }

            if (mFlushingAudio != NONE || mFlushingVideo != NONE) {
                // We're currently flushing, postpone the reset until that's
                // completed.

                ALOGV("postponing reset mFlushingAudio=%d, mFlushingVideo=%d",
                        mFlushingAudio, mFlushingVideo);

                mResetPostponed = true;
                break;
            }

            if (mAudioDecoder == NULL && mVideoDecoder == NULL) {
                // Nothing to flush; tear down immediately.
                finishReset();
                break;
            }

            mTimeDiscontinuityPending = true;

            if (mAudioDecoder != NULL) {
                flushDecoder(true /* audio */, true /* needShutdown */);
            }

            if (mVideoDecoder != NULL) {
                flushDecoder(false /* audio */, true /* needShutdown */);
            }

            mResetInProgress = true;
            break;
        }

        case kWhatSeek:
        {
            int64_t seekTimeUs;
            CHECK(msg->findInt64("seekTimeUs", &seekTimeUs));

            ALOGV("kWhatSeek seekTimeUs=%lld us (%.2f secs)",
                 seekTimeUs, seekTimeUs / 1E6);

            mSource->seekTo(seekTimeUs);

            // NOTE(review): seek completion is reported as soon as the
            // source accepts the request, not when new data is rendered.
            if (mDriver != NULL) {
                sp<NuPlayerDriver> driver = mDriver.promote();
                if (driver != NULL) {
                    driver->notifySeekComplete();
                }
            }

            break;
        }

        case kWhatPause:
        {
            CHECK(mRenderer != NULL);
            mRenderer->pause();
            break;
        }

        case kWhatResume:
        {
            CHECK(mRenderer != NULL);
            mRenderer->resume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

// Called whenever either stream's flush state advances.  Once both streams
// are FLUSHED or SHUT_DOWN, this resumes the decoders (or completes a
// pending reset) and clears the flush state machine.
void NuPlayer::finishFlushIfPossible() {
    if (mFlushingAudio != FLUSHED && mFlushingAudio != SHUT_DOWN) {
        return;
    }

    if (mFlushingVideo != FLUSHED && mFlushingVideo != SHUT_DOWN) {
        return;
    }

    ALOGV("both audio and video are flushed now.");

    if (mTimeDiscontinuityPending) {
        mRenderer->signalTimeDiscontinuity();
        mTimeDiscontinuityPending = false;
    }

    if (mAudioDecoder != NULL) {
        mAudioDecoder->signalResume();
    }

    if (mVideoDecoder != NULL) {
        mVideoDecoder->signalResume();
    }

    mFlushingAudio = NONE;
    mFlushingVideo = NONE;

    if (mResetInProgress) {
        ALOGV("reset completed");

        mResetInProgress = false;
        finishReset();
    } else if (mResetPostponed) {
        // A reset arrived mid-flush; replay it now.
        (new AMessage(kWhatReset, id()))->post();
        mResetPostponed = false;
    } else if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
        // A format-change shutdown removed a decoder; rescan to recreate it.
        postScanSources();
    }
}

// Final stage of a reset: both decoders must already be gone.  Stops the
// source, drops the renderer and notifies the driver.
void NuPlayer::finishReset() {
    CHECK(mAudioDecoder == NULL);
    CHECK(mVideoDecoder == NULL);

    // Invalidate any in-flight scan-sources messages.
    ++mScanSourcesGeneration;
    mScanSourcesPending = false;

    mRenderer.clear();

    if (mSource != NULL) {
        mSource->stop();
        mSource.clear();
    }

    if (mDriver != NULL) {
        sp<NuPlayerDriver> driver = mDriver.promote();
        if (driver != NULL) {
            driver->notifyResetComplete();
        }
    }
}

// Posts a (generation-tagged) request to instantiate any missing decoders;
// a no-op if one is already pending.
void NuPlayer::postScanSources() {
    if (mScanSourcesPending) {
        return;
    }

    sp<AMessage> msg = new AMessage(kWhatScanSources, id());
    msg->setInt32("generation", mScanSourcesGeneration);
    msg->post();

    mScanSourcesPending = true;
}

// Creates the audio or video decoder once the source can supply a format.
// Returns OK if the decoder exists (or was just created), -EWOULDBLOCK if
// the track's format is not yet known.
status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
    if (*decoder != NULL) {
        return OK;
    }

    sp<MetaData> meta = mSource->getFormat(audio);

    if (meta == NULL) {
        return -EWOULDBLOCK;
    }

    if (!audio) {
        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));
        // Remembered so late AVC non-reference frames can be dropped in
        // feedDecoderInputData().
        mVideoIsAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime);
    }

    sp<AMessage> notify =
        new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
                     id());

    *decoder = audio ? new Decoder(notify) :
                       new Decoder(notify, mNativeWindow);
    looper()->registerHandler(*decoder);

    (*decoder)->configure(meta);

    int64_t durationUs;
    if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
        sp<NuPlayerDriver> driver = mDriver.promote();
        if (driver != NULL) {
            driver->notifyDuration(durationUs);
        }
    }

    return OK;
}

// Answers a decoder's "fill this buffer" request: dequeues an access unit
// from the source and posts it back via the supplied reply message.
// Returns -EWOULDBLOCK when no data is available yet; handles stream
// discontinuities (format/time changes) by kicking off the flush machinery.
status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    if ((audio && IsFlushingState(mFlushingAudio))
            || (!audio && IsFlushingState(mFlushingVideo))) {
        // Mid-flush: tell the decoder about the discontinuity instead of
        // feeding it data.
        reply->setInt32("err", INFO_DISCONTINUITY);
        reply->post();
        return OK;
    }

    sp<ABuffer> accessUnit;

    // Loop so that dropped (late, non-reference) video frames are replaced
    // by the next access unit before replying.
    bool dropAccessUnit;
    do {
        status_t err = mSource->dequeueAccessUnit(audio, &accessUnit);

        if (err == -EWOULDBLOCK) {
            return err;
        } else if (err != OK) {
            if (err == INFO_DISCONTINUITY) {
                int32_t type;
                CHECK(accessUnit->meta()->findInt32("discontinuity", &type));

                bool formatChange =
                    (audio &&
                     (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT))
                    || (!audio &&
                        (type & ATSParser::DISCONTINUITY_VIDEO_FORMAT));

                bool timeChange = (type & ATSParser::DISCONTINUITY_TIME) != 0;

                ALOGI("%s discontinuity (formatChange=%d, time=%d)",
                     audio ? "audio" : "video", formatChange, timeChange);

                if (audio) {
                    mSkipRenderingAudioUntilMediaTimeUs = -1;
                } else {
                    mSkipRenderingVideoUntilMediaTimeUs = -1;
                }

                if (timeChange) {
                    // A seek may carry a resume timestamp so frames decoded
                    // before the target are dropped rather than rendered.
                    sp<AMessage> extra;
                    if (accessUnit->meta()->findMessage("extra", &extra)
                            && extra != NULL) {
                        int64_t resumeAtMediaTimeUs;
                        if (extra->findInt64(
                                    "resume-at-mediatimeUs", &resumeAtMediaTimeUs)) {
                            ALOGI("suppressing rendering of %s until %lld us",
                                    audio ? "audio" : "video", resumeAtMediaTimeUs);

                            if (audio) {
                                mSkipRenderingAudioUntilMediaTimeUs =
                                    resumeAtMediaTimeUs;
                            } else {
                                mSkipRenderingVideoUntilMediaTimeUs =
                                    resumeAtMediaTimeUs;
                            }
                        }
                    }
                }

                mTimeDiscontinuityPending =
                    mTimeDiscontinuityPending || timeChange;

                if (formatChange || timeChange) {
                    flushDecoder(audio, formatChange);
                } else {
                    // This stream is unaffected by the discontinuity

                    if (audio) {
                        mFlushingAudio = FLUSHED;
                    } else {
                        mFlushingVideo = FLUSHED;
                    }

                    finishFlushIfPossible();

                    return -EWOULDBLOCK;
                }
            }

            // Propagate the error (including INFO_DISCONTINUITY for a
            // stream that is being flushed) to the decoder.
            reply->setInt32("err", err);
            reply->post();
            return OK;
        }

        if (!audio) {
            ++mNumFramesTotal;
        }

        dropAccessUnit = false;
        // Drop late AVC frames that no other frame depends on.
        if (!audio
                && mVideoLateByUs > 100000ll
                && mVideoIsAVC
                && !IsAVCReferenceFrame(accessUnit)) {
            dropAccessUnit = true;
            ++mNumFramesDropped;
        }
    } while (dropAccessUnit);

    // ALOGV("returned a valid buffer of %s data", audio ? "audio" : "video");

#if 0
    int64_t mediaTimeUs;
    CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
    ALOGV("feeding %s input buffer at media time %.2f secs",
         audio ? "audio" : "video",
         mediaTimeUs / 1E6);
#endif

    reply->setBuffer("buffer", accessUnit);
    reply->post();

    return OK;
}

// Handles a decoder's "drain this buffer" request: either forwards the
// output buffer to the renderer or returns it immediately (mid-flush, or
// while suppressing rendering after a seek).
void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
    // ALOGV("renderBuffer %s", audio ? "audio" : "video");

    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    if (IsFlushingState(audio ? mFlushingAudio : mFlushingVideo)) {
        // We're currently attempting to flush the decoder, in order
        // to complete this, the decoder wants all its buffers back,
        // so we don't want any output buffers it sent us (from before
        // we initiated the flush) to be stuck in the renderer's queue.

        ALOGV("we're still flushing the %s decoder, sending its output buffer"
             " right back.", audio ? "audio" : "video");

        reply->post();
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    // Reference so clearing the skip mark below writes the right member.
    int64_t &skipUntilMediaTimeUs =
        audio
            ? mSkipRenderingAudioUntilMediaTimeUs
            : mSkipRenderingVideoUntilMediaTimeUs;

    if (skipUntilMediaTimeUs >= 0) {
        int64_t mediaTimeUs;
        CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mediaTimeUs < skipUntilMediaTimeUs) {
            ALOGV("dropping %s buffer at time %lld as requested.",
                 audio ? "audio" : "video",
                 mediaTimeUs);

            reply->post();
            return;
        }

        skipUntilMediaTimeUs = -1;
    }

    mRenderer->queueBuffer(audio, buffer, reply);
}

// Forwards an event to the NuPlayerDriver (and ultimately the client),
// silently dropping it if the driver is gone.
void NuPlayer::notifyListener(int msg, int ext1, int ext2) {
    if (mDriver == NULL) {
        return;
    }

    sp<NuPlayerDriver> driver = mDriver.promote();

    if (driver == NULL) {
        return;
    }

    driver->notifyListener(msg, ext1, ext2);
}

// Starts flushing one decoder (and the renderer's matching queue).  The
// other stream, if it has a decoder, is marked AWAITING_DISCONTINUITY so
// finishFlushIfPossible() waits for its discontinuity too.
void NuPlayer::flushDecoder(bool audio, bool needShutdown) {
    if ((audio && mAudioDecoder == NULL) || (!audio && mVideoDecoder == NULL)) {
        // NOTE(review): logged but not returned from — the signalFlush()
        // below would dereference a NULL decoder in this case.
        ALOGI("flushDecoder %s without decoder present",
             audio ? "audio" : "video");
    }

    // Make sure we don't continue to scan sources until we finish flushing.
    ++mScanSourcesGeneration;
    mScanSourcesPending = false;

    (audio ? mAudioDecoder : mVideoDecoder)->signalFlush();
    mRenderer->flush(audio);

    FlushStatus newStatus =
        needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER;

    if (audio) {
        CHECK(mFlushingAudio == NONE
                || mFlushingAudio == AWAITING_DISCONTINUITY);

        mFlushingAudio = newStatus;

        if (mFlushingVideo == NONE) {
            mFlushingVideo = (mVideoDecoder != NULL)
                ? AWAITING_DISCONTINUITY
                : FLUSHED;
        }
    } else {
        CHECK(mFlushingVideo == NONE
                || mFlushingVideo == AWAITING_DISCONTINUITY);

        mFlushingVideo = newStatus;

        if (mFlushingAudio == NONE) {
            mFlushingAudio = (mAudioDecoder != NULL)
                ? AWAITING_DISCONTINUITY
                : FLUSHED;
        }
    }
}

}  // namespace android