NuPlayer.cpp revision 5d246efa220a7c7b22e490576c488b3853c664dd
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayer"
#include <utils/Log.h>

#include "NuPlayer.h"

#include "HTTPLiveSource.h"
#include "NuPlayerDecoder.h"
#include "NuPlayerDriver.h"
#include "NuPlayerRenderer.h"
#include "NuPlayerSource.h"
#include "RTSPSource.h"
#include "StreamingSource.h"
#include "GenericSource.h"

#include "ATSParser.h"

#include <media/stagefright/foundation/hexdump.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/ACodec.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MetaData.h>
#include <gui/ISurfaceTexture.h>

#include "avc_utils.h"

namespace android {

////////////////////////////////////////////////////////////////////////////////

// All state starts out idle/unset.  The per-playback fields (EOS flags,
// skip-until times, frame counters, ...) are re-initialized again when the
// kWhatStart message is handled.
NuPlayer::NuPlayer()
    : mUIDValid(false),
      mVideoIsAVC(false),
      mAudioEOS(false),
      mVideoEOS(false),
      mScanSourcesPending(false),
      mScanSourcesGeneration(0),
      mTimeDiscontinuityPending(false),
      mFlushingAudio(NONE),
      mFlushingVideo(NONE),
      mResetInProgress(false),
      mResetPostponed(false),
      mSkipRenderingAudioUntilMediaTimeUs(-1ll),
      mSkipRenderingVideoUntilMediaTimeUs(-1ll),
      mVideoLateByUs(0ll),
      mNumFramesTotal(0ll),
      mNumFramesDropped(0ll) {
}

NuPlayer::~NuPlayer() {
}

// Records the client uid so sources created later (HTTP live / RTSP) can be
// attributed to the calling app.
void NuPlayer::setUID(uid_t uid) {
    mUIDValid = true;
    mUID = uid;
}

// The driver is held as a weak reference; every notification path promotes
// it first and silently drops the event if the driver is already gone.
void NuPlayer::setDriver(const wp<NuPlayerDriver> &driver) {
    mDriver = driver;
}

// Asynchronous: wraps the binder stream in a StreamingSource and hands it to
// the looper thread via kWhatSetDataSource.
void NuPlayer::setDataSource(const sp<IStreamSource> &source) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    msg->setObject("source", new StreamingSource(source));
    msg->post();
}

// Heuristic HLS detection: an http(s) URL ending in ".m3u8" is HLS, but note
// that ANY http(s) URL merely containing the substring "m3u8" anywhere is
// also treated as HLS (the strstr() check below is deliberately loose).
static bool IsHTTPLiveURL(const char *url) {
    if (!strncasecmp("http://", url, 7)
            || !strncasecmp("https://", url, 8)) {
        size_t len = strlen(url);
        if (len >= 5 && !strcasecmp(".m3u8", &url[len - 5])) {
            return true;
        }

        if (strstr(url,"m3u8")) {
            return true;
        }
    }

    return false;
}

// Asynchronous: picks a Source implementation from the URL scheme
// (HLS / RTSP / everything else via GenericSource) and posts it to the
// looper thread.
void NuPlayer::setDataSource(
        const char *url, const KeyedVector<String8, String8> *headers) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    sp<Source> source;
    if (IsHTTPLiveURL(url)) {
        source = new HTTPLiveSource(url, headers, mUIDValid, mUID);
    } else if (!strncasecmp(url, "rtsp://", 7)) {
        source = new RTSPSource(url, headers, mUIDValid, mUID);
    } else {
        source = new GenericSource(url, headers, mUIDValid, mUID);
    }

    msg->setObject("source", source);
    msg->post();
}

// Asynchronous: file-descriptor based playback always goes through
// GenericSource.
void NuPlayer::setDataSource(int fd, int64_t offset, int64_t length) {
    sp<AMessage> msg = new AMessage(kWhatSetDataSource, id());

    sp<Source> source = new GenericSource(fd, offset, length);
    msg->setObject("source", source);
    msg->post();
}

// Asynchronous: wraps the SurfaceTexture in a SurfaceTextureClient (NULL is
// preserved so the video output can be detached) and ships it to the looper
// thread inside a NativeWindowWrapper.
void NuPlayer::setVideoSurfaceTexture(const sp<ISurfaceTexture> &surfaceTexture) {
    sp<AMessage> msg = new AMessage(kWhatSetVideoNativeWindow, id());
    sp<SurfaceTextureClient> surfaceTextureClient(surfaceTexture != NULL ?
            new SurfaceTextureClient(surfaceTexture) : NULL);
    msg->setObject("native-window", new NativeWindowWrapper(surfaceTextureClient));
    msg->post();
}

// Asynchronous: the audio sink is installed on the looper thread.
void NuPlayer::setAudioSink(const sp<MediaPlayerBase::AudioSink> &sink) {
    sp<AMessage> msg = new AMessage(kWhatSetAudioSink, id());
    msg->setObject("sink", sink);
    msg->post();
}

void NuPlayer::start() {
    (new AMessage(kWhatStart, id()))->post();
}

void NuPlayer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

// Asynchronous reset; completion is reported via
// NuPlayerDriver::notifyResetComplete().
void NuPlayer::resetAsync() {
    (new AMessage(kWhatReset, id()))->post();
}

// Asynchronous seek; completion is reported via
// NuPlayerDriver::notifySeekComplete().
void NuPlayer::seekToAsync(int64_t seekTimeUs) {
    sp<AMessage> msg = new AMessage(kWhatSeek, id());
    msg->setInt64("seekTimeUs", seekTimeUs);
    msg->post();
}

// static
// Returns true iff |state| is one of the two active decoder-flush states.
// When it is, *needShutdown (if provided) reports whether the decoder must
// also be shut down once its flush completes.
bool NuPlayer::IsFlushingState(FlushStatus state, bool *needShutdown) {
    switch (state) {
        case FLUSHING_DECODER:
            if (needShutdown != NULL) {
                *needShutdown = false;
            }
            return true;

        case FLUSHING_DECODER_SHUTDOWN:
            if (needShutdown != NULL) {
                *needShutdown = true;
            }
            return true;

        default:
            return false;
    }
}

// Central dispatch for every message posted to this handler.  All player
// state is owned and mutated exclusively on the looper thread that delivers
// these messages.
void NuPlayer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatSetDataSource:
        {
            ALOGV("kWhatSetDataSource");

            // A data source may only be set once per player instance.
            CHECK(mSource == NULL);

            sp<RefBase> obj;
            CHECK(msg->findObject("source", &obj));

            mSource = static_cast<Source *>(obj.get());
            break;
        }

        case kWhatSetVideoNativeWindow:
        {
            ALOGV("kWhatSetVideoNativeWindow");

            sp<RefBase> obj;
            CHECK(msg->findObject("native-window", &obj));

            mNativeWindow = static_cast<NativeWindowWrapper *>(obj.get());
            break;
        }

        case kWhatSetAudioSink:
        {
            ALOGV("kWhatSetAudioSink");

            sp<RefBase> obj;
            CHECK(msg->findObject("sink", &obj));

mAudioSink = static_cast<MediaPlayerBase::AudioSink *>(obj.get()); 219 break; 220 } 221 222 case kWhatStart: 223 { 224 ALOGV("kWhatStart"); 225 226 mVideoIsAVC = false; 227 mAudioEOS = false; 228 mVideoEOS = false; 229 mSkipRenderingAudioUntilMediaTimeUs = -1; 230 mSkipRenderingVideoUntilMediaTimeUs = -1; 231 mVideoLateByUs = 0; 232 mNumFramesTotal = 0; 233 mNumFramesDropped = 0; 234 235 mSource->start(); 236 237 mRenderer = new Renderer( 238 mAudioSink, 239 new AMessage(kWhatRendererNotify, id())); 240 241 looper()->registerHandler(mRenderer); 242 243 postScanSources(); 244 break; 245 } 246 247 case kWhatScanSources: 248 { 249 int32_t generation; 250 CHECK(msg->findInt32("generation", &generation)); 251 if (generation != mScanSourcesGeneration) { 252 // Drop obsolete msg. 253 break; 254 } 255 256 mScanSourcesPending = false; 257 258 ALOGV("scanning sources haveAudio=%d, haveVideo=%d", 259 mAudioDecoder != NULL, mVideoDecoder != NULL); 260 261 if (mNativeWindow != NULL) { 262 instantiateDecoder(false, &mVideoDecoder); 263 } 264 265 if (mAudioSink != NULL) { 266 instantiateDecoder(true, &mAudioDecoder); 267 } 268 269 status_t err; 270 if ((err = mSource->feedMoreTSData()) != OK) { 271 if (mAudioDecoder == NULL && mVideoDecoder == NULL) { 272 // We're not currently decoding anything (no audio or 273 // video tracks found) and we just ran out of input data. 
274 275 if (err == ERROR_END_OF_STREAM) { 276 notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0); 277 } else { 278 notifyListener(MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, err); 279 } 280 } 281 break; 282 } 283 284 if (mAudioDecoder == NULL && mAudioSink != NULL || 285 mVideoDecoder == NULL && mNativeWindow != NULL) { 286 msg->post(100000ll); 287 mScanSourcesPending = true; 288 } 289 break; 290 } 291 292 case kWhatVideoNotify: 293 case kWhatAudioNotify: 294 { 295 bool audio = msg->what() == kWhatAudioNotify; 296 297 sp<AMessage> codecRequest; 298 CHECK(msg->findMessage("codec-request", &codecRequest)); 299 300 int32_t what; 301 CHECK(codecRequest->findInt32("what", &what)); 302 303 if (what == ACodec::kWhatFillThisBuffer) { 304 status_t err = feedDecoderInputData( 305 audio, codecRequest); 306 307 if (err == -EWOULDBLOCK) { 308 if (mSource->feedMoreTSData() == OK) { 309 msg->post(10000ll); 310 } 311 } 312 } else if (what == ACodec::kWhatEOS) { 313 int32_t err; 314 CHECK(codecRequest->findInt32("err", &err)); 315 316 if (err == ERROR_END_OF_STREAM) { 317 ALOGV("got %s decoder EOS", audio ? "audio" : "video"); 318 } else { 319 ALOGV("got %s decoder EOS w/ error %d", 320 audio ? "audio" : "video", 321 err); 322 } 323 324 mRenderer->queueEOS(audio, err); 325 } else if (what == ACodec::kWhatFlushCompleted) { 326 bool needShutdown; 327 328 if (audio) { 329 CHECK(IsFlushingState(mFlushingAudio, &needShutdown)); 330 mFlushingAudio = FLUSHED; 331 } else { 332 CHECK(IsFlushingState(mFlushingVideo, &needShutdown)); 333 mFlushingVideo = FLUSHED; 334 335 mVideoLateByUs = 0; 336 } 337 338 ALOGV("decoder %s flush completed", audio ? "audio" : "video"); 339 340 if (needShutdown) { 341 ALOGV("initiating %s decoder shutdown", 342 audio ? "audio" : "video"); 343 344 (audio ? 
mAudioDecoder : mVideoDecoder)->initiateShutdown(); 345 346 if (audio) { 347 mFlushingAudio = SHUTTING_DOWN_DECODER; 348 } else { 349 mFlushingVideo = SHUTTING_DOWN_DECODER; 350 } 351 } 352 353 finishFlushIfPossible(); 354 } else if (what == ACodec::kWhatOutputFormatChanged) { 355 if (audio) { 356 int32_t numChannels; 357 CHECK(codecRequest->findInt32("channel-count", &numChannels)); 358 359 int32_t sampleRate; 360 CHECK(codecRequest->findInt32("sample-rate", &sampleRate)); 361 362 ALOGV("Audio output format changed to %d Hz, %d channels", 363 sampleRate, numChannels); 364 365 mAudioSink->close(); 366 367 audio_output_flags_t flags; 368 int64_t durationUs; 369 // FIXME: we should handle the case where the video decoder is created after 370 // we receive the format change indication. Current code will just make that 371 // we select deep buffer with video which should not be a problem as it should 372 // not prevent from keeping A/V sync. 373 if (mVideoDecoder == NULL && 374 mSource->getDuration(&durationUs) == OK && 375 durationUs > AUDIO_SINK_MIN_DEEP_BUFFER_DURATION_US) { 376 flags = AUDIO_OUTPUT_FLAG_DEEP_BUFFER; 377 } else { 378 flags = AUDIO_OUTPUT_FLAG_NONE; 379 } 380 381 int32_t channelMask; 382 if (!codecRequest->findInt32("channel-mask", &channelMask)) { 383 channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER; 384 } 385 386 CHECK_EQ(mAudioSink->open( 387 sampleRate, 388 numChannels, 389 (audio_channel_mask_t)channelMask, 390 AUDIO_FORMAT_PCM_16_BIT, 391 8 /* bufferCount */, 392 NULL, 393 NULL, 394 flags), 395 (status_t)OK); 396 mAudioSink->start(); 397 398 mRenderer->signalAudioSinkChanged(); 399 } else { 400 // video 401 402 int32_t width, height; 403 CHECK(codecRequest->findInt32("width", &width)); 404 CHECK(codecRequest->findInt32("height", &height)); 405 406 int32_t cropLeft, cropTop, cropRight, cropBottom; 407 CHECK(codecRequest->findRect( 408 "crop", 409 &cropLeft, &cropTop, &cropRight, &cropBottom)); 410 411 ALOGV("Video output format changed to %d x %d " 
412 "(crop: %d x %d @ (%d, %d))", 413 width, height, 414 (cropRight - cropLeft + 1), 415 (cropBottom - cropTop + 1), 416 cropLeft, cropTop); 417 418 notifyListener( 419 MEDIA_SET_VIDEO_SIZE, 420 cropRight - cropLeft + 1, 421 cropBottom - cropTop + 1); 422 } 423 } else if (what == ACodec::kWhatShutdownCompleted) { 424 ALOGV("%s shutdown completed", audio ? "audio" : "video"); 425 if (audio) { 426 mAudioDecoder.clear(); 427 428 CHECK_EQ((int)mFlushingAudio, (int)SHUTTING_DOWN_DECODER); 429 mFlushingAudio = SHUT_DOWN; 430 } else { 431 mVideoDecoder.clear(); 432 433 CHECK_EQ((int)mFlushingVideo, (int)SHUTTING_DOWN_DECODER); 434 mFlushingVideo = SHUT_DOWN; 435 } 436 437 finishFlushIfPossible(); 438 } else if (what == ACodec::kWhatError) { 439 ALOGE("Received error from %s decoder, aborting playback.", 440 audio ? "audio" : "video"); 441 442 mRenderer->queueEOS(audio, UNKNOWN_ERROR); 443 } else if (what == ACodec::kWhatDrainThisBuffer) { 444 renderBuffer(audio, codecRequest); 445 } else { 446 ALOGV("Unhandled codec notification %d.", what); 447 } 448 449 break; 450 } 451 452 case kWhatRendererNotify: 453 { 454 int32_t what; 455 CHECK(msg->findInt32("what", &what)); 456 457 if (what == Renderer::kWhatEOS) { 458 int32_t audio; 459 CHECK(msg->findInt32("audio", &audio)); 460 461 int32_t finalResult; 462 CHECK(msg->findInt32("finalResult", &finalResult)); 463 464 if (audio) { 465 mAudioEOS = true; 466 } else { 467 mVideoEOS = true; 468 } 469 470 if (finalResult == ERROR_END_OF_STREAM) { 471 ALOGV("reached %s EOS", audio ? "audio" : "video"); 472 } else { 473 ALOGE("%s track encountered an error (%d)", 474 audio ? 
"audio" : "video", finalResult); 475 476 notifyListener( 477 MEDIA_ERROR, MEDIA_ERROR_UNKNOWN, finalResult); 478 } 479 480 if ((mAudioEOS || mAudioDecoder == NULL) 481 && (mVideoEOS || mVideoDecoder == NULL)) { 482 notifyListener(MEDIA_PLAYBACK_COMPLETE, 0, 0); 483 } 484 } else if (what == Renderer::kWhatPosition) { 485 int64_t positionUs; 486 CHECK(msg->findInt64("positionUs", &positionUs)); 487 488 CHECK(msg->findInt64("videoLateByUs", &mVideoLateByUs)); 489 490 if (mDriver != NULL) { 491 sp<NuPlayerDriver> driver = mDriver.promote(); 492 if (driver != NULL) { 493 driver->notifyPosition(positionUs); 494 495 driver->notifyFrameStats( 496 mNumFramesTotal, mNumFramesDropped); 497 } 498 } 499 } else if (what == Renderer::kWhatFlushComplete) { 500 CHECK_EQ(what, (int32_t)Renderer::kWhatFlushComplete); 501 502 int32_t audio; 503 CHECK(msg->findInt32("audio", &audio)); 504 505 ALOGV("renderer %s flush completed.", audio ? "audio" : "video"); 506 } 507 break; 508 } 509 510 case kWhatMoreDataQueued: 511 { 512 break; 513 } 514 515 case kWhatReset: 516 { 517 ALOGV("kWhatReset"); 518 519 if (mRenderer != NULL) { 520 // There's an edge case where the renderer owns all output 521 // buffers and is paused, therefore the decoder will not read 522 // more input data and will never encounter the matching 523 // discontinuity. To avoid this, we resume the renderer. 524 525 if (mFlushingAudio == AWAITING_DISCONTINUITY 526 || mFlushingVideo == AWAITING_DISCONTINUITY) { 527 mRenderer->resume(); 528 } 529 } 530 531 if (mFlushingAudio != NONE || mFlushingVideo != NONE) { 532 // We're currently flushing, postpone the reset until that's 533 // completed. 
534 535 ALOGV("postponing reset mFlushingAudio=%d, mFlushingVideo=%d", 536 mFlushingAudio, mFlushingVideo); 537 538 mResetPostponed = true; 539 break; 540 } 541 542 if (mAudioDecoder == NULL && mVideoDecoder == NULL) { 543 finishReset(); 544 break; 545 } 546 547 mTimeDiscontinuityPending = true; 548 549 if (mAudioDecoder != NULL) { 550 flushDecoder(true /* audio */, true /* needShutdown */); 551 } 552 553 if (mVideoDecoder != NULL) { 554 flushDecoder(false /* audio */, true /* needShutdown */); 555 } 556 557 mResetInProgress = true; 558 break; 559 } 560 561 case kWhatSeek: 562 { 563 int64_t seekTimeUs; 564 CHECK(msg->findInt64("seekTimeUs", &seekTimeUs)); 565 566 ALOGV("kWhatSeek seekTimeUs=%lld us (%.2f secs)", 567 seekTimeUs, seekTimeUs / 1E6); 568 569 mSource->seekTo(seekTimeUs); 570 571 if (mDriver != NULL) { 572 sp<NuPlayerDriver> driver = mDriver.promote(); 573 if (driver != NULL) { 574 driver->notifySeekComplete(); 575 } 576 } 577 578 break; 579 } 580 581 case kWhatPause: 582 { 583 CHECK(mRenderer != NULL); 584 mRenderer->pause(); 585 break; 586 } 587 588 case kWhatResume: 589 { 590 CHECK(mRenderer != NULL); 591 mRenderer->resume(); 592 break; 593 } 594 595 default: 596 TRESPASS(); 597 break; 598 } 599} 600 601void NuPlayer::finishFlushIfPossible() { 602 if (mFlushingAudio != FLUSHED && mFlushingAudio != SHUT_DOWN) { 603 return; 604 } 605 606 if (mFlushingVideo != FLUSHED && mFlushingVideo != SHUT_DOWN) { 607 return; 608 } 609 610 ALOGV("both audio and video are flushed now."); 611 612 if (mTimeDiscontinuityPending) { 613 mRenderer->signalTimeDiscontinuity(); 614 mTimeDiscontinuityPending = false; 615 } 616 617 if (mAudioDecoder != NULL) { 618 mAudioDecoder->signalResume(); 619 } 620 621 if (mVideoDecoder != NULL) { 622 mVideoDecoder->signalResume(); 623 } 624 625 mFlushingAudio = NONE; 626 mFlushingVideo = NONE; 627 628 if (mResetInProgress) { 629 ALOGV("reset completed"); 630 631 mResetInProgress = false; 632 finishReset(); 633 } else if (mResetPostponed) 
{
        // A reset arrived while we were flushing; re-post it now that the
        // flush is done.
        (new AMessage(kWhatReset, id()))->post();
        mResetPostponed = false;
    } else if (mAudioDecoder == NULL || mVideoDecoder == NULL) {
        // A format-change flush shut a decoder down; scan again so it gets
        // re-instantiated with the new format.
        postScanSources();
    }
}

// Tears down renderer and source and notifies the driver.  Both decoders
// must already be gone (shut down or never created).
void NuPlayer::finishReset() {
    CHECK(mAudioDecoder == NULL);
    CHECK(mVideoDecoder == NULL);

    // Invalidate any in-flight kWhatScanSources messages.
    ++mScanSourcesGeneration;
    mScanSourcesPending = false;

    mRenderer.clear();

    if (mSource != NULL) {
        mSource->stop();
        mSource.clear();
    }

    if (mDriver != NULL) {
        sp<NuPlayerDriver> driver = mDriver.promote();
        if (driver != NULL) {
            driver->notifyResetComplete();
        }
    }
}

// Posts a kWhatScanSources message (deduplicated via mScanSourcesPending),
// tagged with the current generation so stale posts can be dropped.
void NuPlayer::postScanSources() {
    if (mScanSourcesPending) {
        return;
    }

    sp<AMessage> msg = new AMessage(kWhatScanSources, id());
    msg->setInt32("generation", mScanSourcesGeneration);
    msg->post();

    mScanSourcesPending = true;
}

// Creates and configures the audio or video decoder for the track's current
// format.  Returns OK if the decoder already exists or was created, and
// -EWOULDBLOCK if the source has not determined the track's format yet.
status_t NuPlayer::instantiateDecoder(bool audio, sp<Decoder> *decoder) {
    if (*decoder != NULL) {
        return OK;
    }

    sp<MetaData> meta = mSource->getFormat(audio);

    if (meta == NULL) {
        return -EWOULDBLOCK;
    }

    if (!audio) {
        // Remember whether the video track is AVC; the late-frame dropping
        // in feedDecoderInputData() only applies to AVC.
        const char *mime;
        CHECK(meta->findCString(kKeyMIMEType, &mime));
        mVideoIsAVC = !strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime);
    }

    sp<AMessage> notify =
        new AMessage(audio ? kWhatAudioNotify : kWhatVideoNotify,
                     id());

    *decoder = audio ? new Decoder(notify) :
                       new Decoder(notify, mNativeWindow);
    looper()->registerHandler(*decoder);

    (*decoder)->configure(meta);

    // Now that a track's format is known the duration may be available too.
    int64_t durationUs;
    if (mDriver != NULL && mSource->getDuration(&durationUs) == OK) {
        sp<NuPlayerDriver> driver = mDriver.promote();
        if (driver != NULL) {
            driver->notifyDuration(durationUs);
        }
    }

    return OK;
}

// Answers a decoder's kWhatFillThisBuffer request: dequeues an access unit
// from the source and posts it on |msg|'s "reply".  Returns -EWOULDBLOCK if
// no data is available yet; discontinuities are translated into decoder
// flushes and an INFO_DISCONTINUITY reply.
status_t NuPlayer::feedDecoderInputData(bool audio, const sp<AMessage> &msg) {
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    if ((audio && IsFlushingState(mFlushingAudio))
            || (!audio && IsFlushingState(mFlushingVideo))) {
        // Mid-flush: tell the decoder about the discontinuity instead of
        // feeding it data.
        reply->setInt32("err", INFO_DISCONTINUITY);
        reply->post();
        return OK;
    }

    sp<ABuffer> accessUnit;

    bool dropAccessUnit;
    do {
        status_t err = mSource->dequeueAccessUnit(audio, &accessUnit);

        if (err == -EWOULDBLOCK) {
            return err;
        } else if (err != OK) {
            if (err == INFO_DISCONTINUITY) {
                int32_t type;
                CHECK(accessUnit->meta()->findInt32("discontinuity", &type));

                bool formatChange =
                    (audio &&
                     (type & ATSParser::DISCONTINUITY_AUDIO_FORMAT))
                    || (!audio &&
                        (type & ATSParser::DISCONTINUITY_VIDEO_FORMAT));

                bool timeChange = (type & ATSParser::DISCONTINUITY_TIME) != 0;

                ALOGI("%s discontinuity (formatChange=%d, time=%d)",
                     audio ? "audio" : "video", formatChange, timeChange);

                if (audio) {
                    mSkipRenderingAudioUntilMediaTimeUs = -1;
                } else {
                    mSkipRenderingVideoUntilMediaTimeUs = -1;
                }

                if (timeChange) {
                    // The source may ask us to suppress rendering until a
                    // given media time (e.g. resuming mid-stream).
                    sp<AMessage> extra;
                    if (accessUnit->meta()->findMessage("extra", &extra)
                            && extra != NULL) {
                        int64_t resumeAtMediaTimeUs;
                        if (extra->findInt64(
                                "resume-at-mediatimeUs",
                                &resumeAtMediaTimeUs)) {
                            ALOGI("suppressing rendering of %s until %lld us",
                                    audio ? "audio" : "video",
                                    resumeAtMediaTimeUs);

                            if (audio) {
                                mSkipRenderingAudioUntilMediaTimeUs =
                                    resumeAtMediaTimeUs;
                            } else {
                                mSkipRenderingVideoUntilMediaTimeUs =
                                    resumeAtMediaTimeUs;
                            }
                        }
                    }
                }

                mTimeDiscontinuityPending =
                    mTimeDiscontinuityPending || timeChange;

                if (formatChange || timeChange) {
                    // Flush; shut the decoder down too iff the format changed.
                    flushDecoder(audio, formatChange);
                } else {
                    // This stream is unaffected by the discontinuity

                    if (audio) {
                        mFlushingAudio = FLUSHED;
                    } else {
                        mFlushingVideo = FLUSHED;
                    }

                    finishFlushIfPossible();

                    return -EWOULDBLOCK;
                }
            }

            reply->setInt32("err", err);
            reply->post();
            return OK;
        }

        if (!audio) {
            ++mNumFramesTotal;
        }

        // If video is running more than 100ms late, drop non-reference AVC
        // frames rather than decoding them.
        dropAccessUnit = false;
        if (!audio
                && mVideoLateByUs > 100000ll
                && mVideoIsAVC
                && !IsAVCReferenceFrame(accessUnit)) {
            dropAccessUnit = true;
            ++mNumFramesDropped;
        }
    } while (dropAccessUnit);

    // ALOGV("returned a valid buffer of %s data", audio ? "audio" : "video");

#if 0
    int64_t mediaTimeUs;
    CHECK(accessUnit->meta()->findInt64("timeUs", &mediaTimeUs));
    ALOGV("feeding %s input buffer at media time %.2f secs",
         audio ? "audio" : "video",
         mediaTimeUs / 1E6);
#endif

    reply->setBuffer("buffer", accessUnit);
    reply->post();

    return OK;
}

// Handles a decoder's kWhatDrainThisBuffer: hands the output buffer to the
// renderer unless we are flushing or suppressing rendering, in which case
// the buffer is returned to the decoder immediately by posting the reply.
void NuPlayer::renderBuffer(bool audio, const sp<AMessage> &msg) {
    // ALOGV("renderBuffer %s", audio ? "audio" : "video");

    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));

    if (IsFlushingState(audio ? mFlushingAudio : mFlushingVideo)) {
        // We're currently attempting to flush the decoder, in order
        // to complete this, the decoder wants all its buffers back,
        // so we don't want any output buffers it sent us (from before
        // we initiated the flush) to be stuck in the renderer's queue.

        ALOGV("we're still flushing the %s decoder, sending its output buffer"
             " right back.", audio ? "audio" : "video");

        reply->post();
        return;
    }

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    // Reference into the per-stream skip threshold so it can be cleared in
    // place once we catch up.
    int64_t &skipUntilMediaTimeUs =
        audio
            ? mSkipRenderingAudioUntilMediaTimeUs
            : mSkipRenderingVideoUntilMediaTimeUs;

    if (skipUntilMediaTimeUs >= 0) {
        int64_t mediaTimeUs;
        CHECK(buffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mediaTimeUs < skipUntilMediaTimeUs) {
            ALOGV("dropping %s buffer at time %lld as requested.",
                 audio ? "audio" : "video",
                 mediaTimeUs);

            reply->post();
            return;
        }

        skipUntilMediaTimeUs = -1;
    }

    mRenderer->queueBuffer(audio, buffer, reply);
}

// Forwards an event to the NuPlayerDriver if it still exists (weak ref).
void NuPlayer::notifyListener(int msg, int ext1, int ext2) {
    if (mDriver == NULL) {
        return;
    }

    sp<NuPlayerDriver> driver = mDriver.promote();

    if (driver == NULL) {
        return;
    }

    driver->notifyListener(msg, ext1, ext2);
}

// Starts a flush of one decoder (and the matching renderer queue).  If
// needShutdown, the decoder will also be shut down once the flush completes
// (see kWhatFlushCompleted handling).  The OTHER stream is parked in
// AWAITING_DISCONTINUITY (or FLUSHED if it has no decoder) so
// finishFlushIfPossible() can tell when both sides are done.
void NuPlayer::flushDecoder(bool audio, bool needShutdown) {
    if ((audio && mAudioDecoder == NULL) || (!audio && mVideoDecoder == NULL)) {
        ALOGI("flushDecoder %s without decoder present",
             audio ? "audio" : "video");
    }

    // Make sure we don't continue to scan sources until we finish flushing.
    ++mScanSourcesGeneration;
    mScanSourcesPending = false;

    (audio ? mAudioDecoder : mVideoDecoder)->signalFlush();
    mRenderer->flush(audio);

    FlushStatus newStatus =
        needShutdown ? FLUSHING_DECODER_SHUTDOWN : FLUSHING_DECODER;

    if (audio) {
        CHECK(mFlushingAudio == NONE
                || mFlushingAudio == AWAITING_DISCONTINUITY);

        mFlushingAudio = newStatus;

        if (mFlushingVideo == NONE) {
            mFlushingVideo = (mVideoDecoder != NULL)
                ? AWAITING_DISCONTINUITY
                : FLUSHED;
        }
    } else {
        CHECK(mFlushingVideo == NONE
                || mFlushingVideo == AWAITING_DISCONTINUITY);

        mFlushingVideo = newStatus;

        if (mFlushingAudio == NONE) {
            mFlushingAudio = (mAudioDecoder != NULL)
                ? AWAITING_DISCONTINUITY
                : FLUSHED;
        }
    }
}

}  // namespace android