// NuPlayerRenderer.cpp revision bc7f5b2e56107cfeaeeab13cf8979379e3c2f139
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>

namespace android {

// Threading model: every public entry point (queueBuffer, queueEOS, flush,
// pause, resume, signalAudioSinkChanged) merely posts an AMessage to this
// handler's looper, so all queue/anchor state is mutated from a single
// thread in onMessageReceived().  The only state shared with callers'
// threads is the pair of "flushing" flags, guarded by mFlushLock.
//
// A/V sync model: an "anchor" pairs a media-time timestamp
// (mAnchorTimeMediaUs) with the real (wallclock) time at which that
// timestamp is presented (mAnchorTimeRealUs).  Audio playback establishes
// the anchor whenever a new audio buffer starts draining; video frames are
// then scheduled at (mediaTimeUs - anchorMedia) + anchorReal.

// Constructs a renderer that writes audio to |sink| and reports events
// (kWhatEOS, kWhatFlushComplete, kWhatPosition) via dups of |notify|.
// Anchors start at -1, i.e. "no anchor established yet".
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify)
    : mAudioSink(sink),
      mNotify(notify),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAnchorTimeMediaUs(-1),
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false) {
}

NuPlayer::Renderer::~Renderer() {
}

// Asynchronously enqueues a decoded buffer.  |notifyConsumed| is posted
// back (once) when the renderer is done with the buffer; for video it is
// additionally tagged with "render" = true so the decoder knows to
// actually display the frame.
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setObject("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

// Asynchronously enqueues an end-of-stream marker.  |finalResult| must be
// an actual error/EOS status, never OK — a NULL-buffer queue entry is how
// EOS is represented internally, and OK would be indistinguishable from
// "no result".
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

// Initiates a flush of one stream.  The flushing flag is raised
// synchronously (under mFlushLock) so that buffers queued from other
// threads between now and onFlush() are dropped by
// dropBufferWhileFlushing(); the actual queue teardown happens on the
// looper in onFlush().  Calling flush twice for the same stream without
// an intervening completion is a caller error (CHECK).
void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            CHECK(!mFlushingAudio);
            mFlushingAudio = true;
        } else {
            CHECK(!mFlushingVideo);
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

// Resets the A/V anchor after a seek/discontinuity.  Both queues must
// already be empty (i.e. flushed).  NOTE(review): this runs on the
// caller's thread, not the looper — presumably callers guarantee the
// renderer is quiescent at this point; verify against NuPlayer.
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    CHECK(mAudioQueue.empty());
    CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    // Re-arm start-up queue syncing only if both streams exist; with a
    // single stream there is nothing to align.
    mSyncQueues = mHasAudio && mHasVideo;
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

// Looper-thread dispatch.  The drain messages carry a generation number;
// a message whose generation no longer matches was scheduled before a
// flush/pause bumped the counter, and is silently discarded.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;  // stale drain request from before a flush/pause
            }

            mDrainAudioQueuePending = false;

            onDrainAudioQueue();

            // Audio drains on a fixed cadence: immediately reschedule.
            postDrainAudioQueue();
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;  // stale drain request
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            // Video drains one frame at a time; schedule the next frame.
            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

// Schedules the next audio drain pass, unless one is already pending, we
// are still aligning the two queues at start-up, we're paused, or there's
// nothing to drain.
void NuPlayer::Renderer::postDrainAudioQueue() {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(10000);  // poll the sink every 10ms
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

// Writes as much queued audio into the sink as it currently has room for,
// updating the A/V anchor as each new buffer starts, then publishes the
// current position.
void NuPlayer::Renderer::onDrainAudioQueue() {
    uint32_t numFramesPlayed;
    CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

    // Free space in the sink = its total frame capacity minus the frames
    // we've written that it hasn't played yet.
    ssize_t numFramesAvailableToWrite =
        mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

    CHECK_GE(numFramesAvailableToWrite, 0);

    size_t numBytesAvailableToWrite =
        numFramesAvailableToWrite * mAudioSink->frameSize();

    while (numBytesAvailableToWrite > 0) {
        if (mAudioQueue.empty()) {
            break;
        }

        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS

            notifyEOS(true /* audio */);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return;
        }

        if (entry->mOffset == 0) {
            // First write from this buffer: re-anchor media time to real
            // time based on how much written audio is still pending in
            // the sink.
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            LOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

            // This buffer's first sample will be heard after the pending
            // frames play out plus (half of — see XXX) the sink latency.
            // NOTE(review): latency() presumably returns msecs, like
            // msecsPerFrame() — confirm against the AudioSink interface.
            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2  /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // LOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        // NOTE(review): asserts a complete write — a short write from the
        // sink would abort here rather than be retried.
        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            // Buffer fully consumed: tell the decoder and drop the entry.
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
        }

        numBytesAvailableToWrite -= copy;
        mNumFramesWritten += copy / mAudioSink->frameSize();
    }

    notifyPosition();
}

// Schedules a drain of the next video frame at the real time implied by
// its timestamp and the current anchor (immediately if no anchor exists
// or the entry is EOS).
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            if (!mHasAudio) {
                // Video-only: the first frame itself establishes the
                // anchor.  With audio present, anchoring is left to
                // onDrainAudioQueue().
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            // May be negative if we're behind; post() then fires at once.
            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

// Releases the head video frame for rendering (its scheduled time has
// arrived), or reports EOS if the head entry is the EOS marker.
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;
        return;
    }

#if 0
    int64_t mediaTimeUs;
    CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

    LOGI("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
#endif

    // "render" = true tells the decoder to actually display this frame
    // (as opposed to a flush, where consumed buffers are not rendered).
    entry->mNotifyConsumed->setInt32("render", true);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    notifyPosition();
}

// Reports end-of-stream for one stream to the owner.
void NuPlayer::Renderer::notifyEOS(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

// Looper-side handler for queueBuffer().  While mSyncQueues is set
// (start-up after a discontinuity with both streams present), drains are
// held off until the heads of both queues are time-aligned; audio that
// leads video by more than 100ms is dropped unrendered.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<RefBase> obj;
    CHECK(msg->findObject("buffer", &obj));
    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();  // no-op while mSyncQueues is set
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();  // no-op while mSyncQueues is set
    }

    if (mSyncQueues && !mAudioQueue.empty() && !mVideoQueue.empty()) {
        int64_t firstAudioTimeUs;
        int64_t firstVideoTimeUs;
        CHECK((*mAudioQueue.begin()).mBuffer->meta()
                ->findInt64("timeUs", &firstAudioTimeUs));
        CHECK((*mVideoQueue.begin()).mBuffer->meta()
                ->findInt64("timeUs", &firstVideoTimeUs));

        int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

        LOGV("queueDiff = %.2f secs", diff / 1E6);

        if (diff > 100000ll) {
            // Audio data starts more than 0.1 secs before video.
            // Drop some audio.
            // NOTE(review): only leading audio is dropped here; leading
            // video (diff < -100ms) is not — confirm this asymmetry is
            // intended.

            (*mAudioQueue.begin()).mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            return;
        }

        // Heads are within 100ms of each other: start draining both.
        syncQueuesDone();
    }
}

// Ends the start-up queue-sync phase and kicks off any drains that were
// suppressed while it was active.  Safe to call when not syncing.
void NuPlayer::Renderer::syncQueuesDone() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

// Looper-side handler for queueEOS(): appends an EOS marker entry
// (mBuffer left NULL) carrying the final status.
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

// Looper-side handler for flush(): empties the requested queue, clears
// the flushing flag, and invalidates any in-flight drain message by
// bumping the queue generation.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    syncQueuesDone();

    if (audio) {
        flushQueue(&mAudioQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;
    }

    notifyFlushComplete(audio);
}

// Empties |queue|, returning every real buffer (unrendered — no "render"
// flag is set) to the decoder.  EOS markers (NULL mBuffer) are simply
// discarded.
void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

// Returns true (and immediately hands the buffer back to the decoder, if
// any) when the corresponding stream is mid-flush, so queued buffers that
// raced with flush() are discarded instead of enqueued.
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    // EOS entries carry no buffer/notifyConsumed, hence the conditional.
    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

// The audio sink was torn down and recreated (e.g. format change): its
// frame counter restarts at zero, so our written-frame count must too.
// Must not race an in-flight drain (CHECK).
void NuPlayer::Renderer::onAudioSinkChanged() {
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
}

// Publishes the current playback position, extrapolated from the anchor
// pair.  Silently does nothing before the first anchor is established.
void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();
    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->post();
}

// Halts both drains (pending messages are invalidated via the generation
// bump) and pauses the audio sink.  Queued buffers are retained.
void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    if (mHasAudio) {
        mAudioSink->pause();
    }

    mPaused = true;
}

// Restarts the audio sink and re-arms both drain loops.  mPaused must be
// cleared before posting the drains, since postDrain*Queue() early-outs
// while paused.
void NuPlayer::Renderer::onResume() {
    CHECK(mPaused);

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

}  // namespace android