// NuPlayerRenderer.cpp revision 9b7d950f1f3b0c526712b713dbceb0e22762c015
/*
 * Copyright (C) 2010 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "NuPlayerRenderer"
#include <utils/Log.h>

#include "NuPlayerRenderer.h"

#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/foundation/AMessage.h>

namespace android {

// Renderer drains decoded audio/video buffers, writing audio to the
// AudioSink and telling the video decoder when to render a frame.
//
// Threading model: the public entry points (queueBuffer, queueEOS, flush,
// pause, resume, signalAudioSinkChanged) only post AMessages; all real work
// happens in the onXXX handlers invoked from onMessageReceived on the
// renderer's looper thread. The only state shared across threads is
// mFlushingAudio/mFlushingVideo, guarded by mFlushLock.
NuPlayer::Renderer::Renderer(
        const sp<MediaPlayerBase::AudioSink> &sink,
        const sp<AMessage> &notify)
    : mAudioSink(sink),
      mNotify(notify),
      mNumFramesWritten(0),
      mDrainAudioQueuePending(false),
      mDrainVideoQueuePending(false),
      mAudioQueueGeneration(0),
      mVideoQueueGeneration(0),
      mAnchorTimeMediaUs(-1),     // -1 == no A/V clock anchor established yet
      mAnchorTimeRealUs(-1),
      mFlushingAudio(false),
      mFlushingVideo(false),
      mHasAudio(false),
      mHasVideo(false),
      mSyncQueues(false),
      mPaused(false) {
}

NuPlayer::Renderer::~Renderer() {
}

// Hands a decoded buffer to the renderer. notifyConsumed is posted back
// once the buffer has been consumed (for video, with "render" set when the
// frame should actually be displayed). Safe to call from any thread.
void NuPlayer::Renderer::queueBuffer(
        bool audio,
        const sp<ABuffer> &buffer,
        const sp<AMessage> &notifyConsumed) {
    sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setObject("buffer", buffer);
    msg->setMessage("notifyConsumed", notifyConsumed);
    msg->post();
}

// Queues an end-of-stream marker for the audio or video stream.
// finalResult must be an actual error/EOS status, never OK.
void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
    CHECK_NE(finalResult, (status_t)OK);

    sp<AMessage> msg = new AMessage(kWhatQueueEOS, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->setInt32("finalResult", finalResult);
    msg->post();
}

// Initiates a flush of one stream. The mFlushingXXX flag is raised here
// (caller thread, under mFlushLock) so buffers queued from now on are
// dropped by dropBufferWhileFlushing(); the actual queue purge happens in
// onFlush on the looper thread. Flushing the same stream twice without an
// intervening completion is a programming error (CHECK).
void NuPlayer::Renderer::flush(bool audio) {
    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            CHECK(!mFlushingAudio);
            mFlushingAudio = true;
        } else {
            CHECK(!mFlushingVideo);
            mFlushingVideo = true;
        }
    }

    sp<AMessage> msg = new AMessage(kWhatFlush, id());
    msg->setInt32("audio", static_cast<int32_t>(audio));
    msg->post();
}

// Resets the A/V clock anchor after a time discontinuity (e.g. seek).
// Must only be called once both queues have been flushed empty.
// Re-arms queue syncing only when both streams are present.
// NOTE(review): reads/writes looper-thread state without posting a
// message — presumably callers invoke this from the looper thread; verify.
void NuPlayer::Renderer::signalTimeDiscontinuity() {
    CHECK(mAudioQueue.empty());
    CHECK(mVideoQueue.empty());
    mAnchorTimeMediaUs = -1;
    mAnchorTimeRealUs = -1;
    mSyncQueues = mHasAudio && mHasVideo;
}

void NuPlayer::Renderer::pause() {
    (new AMessage(kWhatPause, id()))->post();
}

void NuPlayer::Renderer::resume() {
    (new AMessage(kWhatResume, id()))->post();
}

// Looper-thread dispatcher. The "generation" check on the drain messages
// discards stale self-posted drains after a flush or pause bumped the
// corresponding mXXXQueueGeneration.
void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
    switch (msg->what()) {
        case kWhatDrainAudioQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mAudioQueueGeneration) {
                break;
            }

            mDrainAudioQueuePending = false;

            onDrainAudioQueue();

            // Re-arm the next drain while data remains.
            postDrainAudioQueue();
            break;
        }

        case kWhatDrainVideoQueue:
        {
            int32_t generation;
            CHECK(msg->findInt32("generation", &generation));
            if (generation != mVideoQueueGeneration) {
                break;
            }

            mDrainVideoQueuePending = false;

            onDrainVideoQueue();

            postDrainVideoQueue();
            break;
        }

        case kWhatQueueBuffer:
        {
            onQueueBuffer(msg);
            break;
        }

        case kWhatQueueEOS:
        {
            onQueueEOS(msg);
            break;
        }

        case kWhatFlush:
        {
            onFlush(msg);
            break;
        }

        case kWhatAudioSinkChanged:
        {
            onAudioSinkChanged();
            break;
        }

        case kWhatPause:
        {
            onPause();
            break;
        }

        case kWhatResume:
        {
            onResume();
            break;
        }

        default:
            TRESPASS();
            break;
    }
}

// Schedules a kWhatDrainAudioQueue message unless one is already pending,
// the queues are still being synced, we're paused, or there's nothing to
// drain. The 10000us (10ms) delay paces how often we refill the sink.
void NuPlayer::Renderer::postDrainAudioQueue() {
    if (mDrainAudioQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mAudioQueue.empty()) {
        return;
    }

    mDrainAudioQueuePending = true;
    sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id());
    msg->setInt32("generation", mAudioQueueGeneration);
    msg->post(10000);
}

void NuPlayer::Renderer::signalAudioSinkChanged() {
    (new AMessage(kWhatAudioSinkChanged, id()))->post();
}

// Writes as much queued audio as the sink can currently accept and updates
// the A/V clock anchor from the audio timeline. Returns early on EOS.
void NuPlayer::Renderer::onDrainAudioQueue() {

    for (;;) {
        uint32_t numFramesPlayed;
        CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

        // Free space in the sink = total frame count minus frames written
        // but not yet played.
        ssize_t numFramesAvailableToWrite =
            mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);

        // NOTE(review): if numFramesAvailableToWrite were ever negative,
        // this ssize_t -> size_t conversion would wrap to a huge value;
        // presumably the sink accounting guarantees it's >= 0 — verify.
        size_t numBytesAvailableToWrite =
            numFramesAvailableToWrite * mAudioSink->frameSize();

        if (numBytesAvailableToWrite == 0) {
            break;
        }

        if (mAudioQueue.empty()) {
            break;
        }

        QueueEntry *entry = &*mAudioQueue.begin();

        if (entry->mBuffer == NULL) {
            // EOS

            notifyEOS(true /* audio */);

            mAudioQueue.erase(mAudioQueue.begin());
            entry = NULL;
            return;
        }

        if (entry->mOffset == 0) {
            // First write of this buffer: (re)anchor the media clock.
            int64_t mediaTimeUs;
            CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

            LOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6);

            mAnchorTimeMediaUs = mediaTimeUs;

            uint32_t numFramesPlayed;
            CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK);

            uint32_t numFramesPendingPlayout =
                mNumFramesWritten - numFramesPlayed;

            // Estimate when this buffer will actually be heard: half the
            // sink latency (the "/ 2" is an acknowledged guess, see XXX)
            // plus the playout time of frames still buffered in the sink.
            int64_t realTimeOffsetUs =
                (mAudioSink->latency() / 2 /* XXX */
                    + numFramesPendingPlayout
                        * mAudioSink->msecsPerFrame()) * 1000ll;

            // LOGI("realTimeOffsetUs = %lld us", realTimeOffsetUs);

            mAnchorTimeRealUs =
                ALooper::GetNowUs() + realTimeOffsetUs;
        }

        // Write as much of the buffer as fits; a partial write leaves the
        // entry at the queue head with mOffset advanced.
        size_t copy = entry->mBuffer->size() - entry->mOffset;
        if (copy > numBytesAvailableToWrite) {
            copy = numBytesAvailableToWrite;
        }

        CHECK_EQ(mAudioSink->write(
                    entry->mBuffer->data() + entry->mOffset, copy),
                 (ssize_t)copy);

        entry->mOffset += copy;
        if (entry->mOffset == entry->mBuffer->size()) {
            entry->mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());

            entry = NULL;
        }

        mNumFramesWritten += copy / mAudioSink->frameSize();
    }

    notifyPosition();
}

// Schedules the next video drain, delayed so the frame at the queue head
// is rendered at its anchored real time. If no anchor exists yet and there
// is no audio stream to establish one, video anchors the clock itself.
void NuPlayer::Renderer::postDrainVideoQueue() {
    if (mDrainVideoQueuePending || mSyncQueues || mPaused) {
        return;
    }

    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry &entry = *mVideoQueue.begin();

    sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id());
    msg->setInt32("generation", mVideoQueueGeneration);

    int64_t delayUs;

    if (entry.mBuffer == NULL) {
        // EOS doesn't carry a timestamp.
        delayUs = 0;
    } else {
        int64_t mediaTimeUs;
        CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

        if (mAnchorTimeMediaUs < 0) {
            delayUs = 0;

            if (!mHasAudio) {
                // Video-only stream: this frame defines the clock anchor.
                mAnchorTimeMediaUs = mediaTimeUs;
                mAnchorTimeRealUs = ALooper::GetNowUs();
            }
        } else {
            // Map media time to real time via the current anchor.
            int64_t realTimeUs =
                (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs;

            delayUs = realTimeUs - ALooper::GetNowUs();
        }
    }

    msg->post(delayUs);

    mDrainVideoQueuePending = true;
}

// Releases the frame at the head of the video queue for rendering
// ("render" == true on the consumed notification), or signals EOS.
void NuPlayer::Renderer::onDrainVideoQueue() {
    if (mVideoQueue.empty()) {
        return;
    }

    QueueEntry *entry = &*mVideoQueue.begin();

    if (entry->mBuffer == NULL) {
        // EOS

        notifyEOS(false /* audio */);

        mVideoQueue.erase(mVideoQueue.begin());
        entry = NULL;
        return;
    }

#if 0
    int64_t mediaTimeUs;
    CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));

    LOGI("rendering video at media time %.2f secs", mediaTimeUs / 1E6);
#endif

    entry->mNotifyConsumed->setInt32("render", true);
    entry->mNotifyConsumed->post();
    mVideoQueue.erase(mVideoQueue.begin());
    entry = NULL;

    notifyPosition();
}

// Notifies the owner that the given stream reached end-of-stream.
void NuPlayer::Renderer::notifyEOS(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatEOS);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

// Looper-thread handler for queueBuffer(). While mSyncQueues is set,
// leading audio that starts >0.1s before the first video frame is dropped
// so both streams begin roughly in sync.
void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (audio) {
        mHasAudio = true;
    } else {
        mHasVideo = true;
    }

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    sp<RefBase> obj;
    CHECK(msg->findObject("buffer", &obj));
    sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get());

    sp<AMessage> notifyConsumed;
    CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));

    QueueEntry entry;
    entry.mBuffer = buffer;
    entry.mNotifyConsumed = notifyConsumed;
    entry.mOffset = 0;
    entry.mFinalResult = OK;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }

    if (mSyncQueues && !mAudioQueue.empty() && !mVideoQueue.empty()) {
        int64_t firstAudioTimeUs;
        int64_t firstVideoTimeUs;
        CHECK((*mAudioQueue.begin()).mBuffer->meta()
                ->findInt64("timeUs", &firstAudioTimeUs));
        CHECK((*mVideoQueue.begin()).mBuffer->meta()
                ->findInt64("timeUs", &firstVideoTimeUs));

        int64_t diff = firstVideoTimeUs - firstAudioTimeUs;

        LOGV("queueDiff = %.2f secs", diff / 1E6);

        if (diff > 100000ll) {
            // Audio data starts more than 0.1 secs before video.
            // Drop some audio.

            (*mAudioQueue.begin()).mNotifyConsumed->post();
            mAudioQueue.erase(mAudioQueue.begin());
            return;
        }

        syncQueuesDone();
    }
}

// Ends the initial queue-sync phase and kicks off draining of whichever
// queues already hold data. No-op if syncing wasn't active.
void NuPlayer::Renderer::syncQueuesDone() {
    if (!mSyncQueues) {
        return;
    }

    mSyncQueues = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

// Looper-thread handler for queueEOS(). An EOS entry is a QueueEntry with
// a NULL mBuffer; the drain functions detect it by that convention.
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    if (dropBufferWhileFlushing(audio, msg)) {
        return;
    }

    int32_t finalResult;
    CHECK(msg->findInt32("finalResult", &finalResult));

    QueueEntry entry;
    entry.mOffset = 0;
    entry.mFinalResult = finalResult;

    if (audio) {
        mAudioQueue.push_back(entry);
        postDrainAudioQueue();
    } else {
        mVideoQueue.push_back(entry);
        postDrainVideoQueue();
    }
}

// Looper-thread handler for flush(): purges the stream's queue, clears the
// flushing flag, invalidates any pending drain via the generation counter
// and reports completion to the owner.
void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
    int32_t audio;
    CHECK(msg->findInt32("audio", &audio));

    // If we're currently syncing the queues, i.e. dropping audio while
    // aligning the first audio/video buffer times and only one of the
    // two queues has data, we may starve that queue by not requesting
    // more buffers from the decoder. If the other source then encounters
    // a discontinuity that leads to flushing, we'll never find the
    // corresponding discontinuity on the other queue.
    // Therefore we'll stop syncing the queues if at least one of them
    // is flushed.
    syncQueuesDone();

    if (audio) {
        flushQueue(&mAudioQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingAudio = false;

        mDrainAudioQueuePending = false;
        ++mAudioQueueGeneration;
    } else {
        flushQueue(&mVideoQueue);

        Mutex::Autolock autoLock(mFlushLock);
        mFlushingVideo = false;

        mDrainVideoQueuePending = false;
        ++mVideoQueueGeneration;
    }

    notifyFlushComplete(audio);
}

// Empties a queue, returning every real buffer (non-NULL mBuffer) to its
// producer via the consumed notification; EOS entries are simply dropped.
void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
    while (!queue->empty()) {
        QueueEntry *entry = &*queue->begin();

        if (entry->mBuffer != NULL) {
            entry->mNotifyConsumed->post();
        }

        queue->erase(queue->begin());
        entry = NULL;
    }
}

void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatFlushComplete);
    notify->setInt32("audio", static_cast<int32_t>(audio));
    notify->post();
}

// Returns true (and immediately releases the buffer back to the producer)
// if the corresponding stream is mid-flush, so stale buffers queued after
// flush() was called never enter the queues. mFlushLock guards the flags,
// which are set on the caller's thread in flush().
bool NuPlayer::Renderer::dropBufferWhileFlushing(
        bool audio, const sp<AMessage> &msg) {
    bool flushing = false;

    {
        Mutex::Autolock autoLock(mFlushLock);
        if (audio) {
            flushing = mFlushingAudio;
        } else {
            flushing = mFlushingVideo;
        }
    }

    if (!flushing) {
        return false;
    }

    // EOS messages carry no "notifyConsumed", hence the unchecked find.
    sp<AMessage> notifyConsumed;
    if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
        notifyConsumed->post();
    }

    return true;
}

// The audio sink was replaced; restart frame accounting from zero. Only
// legal while no drain is pending.
void NuPlayer::Renderer::onAudioSinkChanged() {
    CHECK(!mDrainAudioQueuePending);
    mNumFramesWritten = 0;
}

// Reports the current playback position, extrapolated from the clock
// anchor. Silently does nothing before an anchor has been established.
void NuPlayer::Renderer::notifyPosition() {
    if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) {
        return;
    }

    int64_t nowUs = ALooper::GetNowUs();
    int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs;

    sp<AMessage> notify = mNotify->dup();
    notify->setInt32("what", kWhatPosition);
    notify->setInt64("positionUs", positionUs);
    notify->post();
}

// Pauses the sink and invalidates pending drains (via the generation
// counters) so nothing is rendered until onResume.
void NuPlayer::Renderer::onPause() {
    CHECK(!mPaused);

    mDrainAudioQueuePending = false;
    ++mAudioQueueGeneration;

    mDrainVideoQueuePending = false;
    ++mVideoQueueGeneration;

    if (mHasAudio) {
        mAudioSink->pause();
    }

    mPaused = true;
}

// Restarts the sink and re-arms draining of any queued data.
void NuPlayer::Renderer::onResume() {
    CHECK(mPaused);

    if (mHasAudio) {
        mAudioSink->start();
    }

    mPaused = false;

    if (!mAudioQueue.empty()) {
        postDrainAudioQueue();
    }

    if (!mVideoQueue.empty()) {
        postDrainVideoQueue();
    }
}

}  // namespace android