// NuPlayerRenderer.cpp revision b408222bd9479c291874b607acae1425d6154fe7
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17//#define LOG_NDEBUG 0 18#define LOG_TAG "NuPlayerRenderer" 19#include <utils/Log.h> 20 21#include "NuPlayerRenderer.h" 22 23#include <media/stagefright/foundation/ABuffer.h> 24#include <media/stagefright/foundation/ADebug.h> 25#include <media/stagefright/foundation/AMessage.h> 26 27namespace android { 28 29NuPlayer::Renderer::Renderer( 30 const sp<MediaPlayerBase::AudioSink> &sink, 31 const sp<AMessage> ¬ify) 32 : mAudioSink(sink), 33 mNotify(notify), 34 mNumFramesWritten(0), 35 mDrainAudioQueuePending(false), 36 mDrainVideoQueuePending(false), 37 mAudioQueueGeneration(0), 38 mVideoQueueGeneration(0), 39 mAnchorTimeMediaUs(-1), 40 mAnchorTimeRealUs(-1), 41 mFlushingAudio(false), 42 mFlushingVideo(false), 43 mHasAudio(mAudioSink != NULL), 44 mHasVideo(true), 45 mSyncQueues(mHasAudio && mHasVideo), 46 mPaused(false) { 47} 48 49NuPlayer::Renderer::~Renderer() { 50} 51 52void NuPlayer::Renderer::queueBuffer( 53 bool audio, 54 const sp<ABuffer> &buffer, 55 const sp<AMessage> ¬ifyConsumed) { 56 sp<AMessage> msg = new AMessage(kWhatQueueBuffer, id()); 57 msg->setInt32("audio", static_cast<int32_t>(audio)); 58 msg->setObject("buffer", buffer); 59 msg->setMessage("notifyConsumed", notifyConsumed); 60 msg->post(); 61} 62 63void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) { 64 CHECK_NE(finalResult, (status_t)OK); 65 66 
sp<AMessage> msg = new AMessage(kWhatQueueEOS, id()); 67 msg->setInt32("audio", static_cast<int32_t>(audio)); 68 msg->setInt32("finalResult", finalResult); 69 msg->post(); 70} 71 72void NuPlayer::Renderer::flush(bool audio) { 73 { 74 Mutex::Autolock autoLock(mFlushLock); 75 if (audio) { 76 CHECK(!mFlushingAudio); 77 mFlushingAudio = true; 78 } else { 79 CHECK(!mFlushingVideo); 80 mFlushingVideo = true; 81 } 82 } 83 84 sp<AMessage> msg = new AMessage(kWhatFlush, id()); 85 msg->setInt32("audio", static_cast<int32_t>(audio)); 86 msg->post(); 87} 88 89void NuPlayer::Renderer::signalTimeDiscontinuity() { 90 CHECK(mAudioQueue.empty()); 91 CHECK(mVideoQueue.empty()); 92 mAnchorTimeMediaUs = -1; 93 mAnchorTimeRealUs = -1; 94 mSyncQueues = mHasAudio && mHasVideo; 95} 96 97void NuPlayer::Renderer::pause() { 98 (new AMessage(kWhatPause, id()))->post(); 99} 100 101void NuPlayer::Renderer::resume() { 102 (new AMessage(kWhatResume, id()))->post(); 103} 104 105void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) { 106 switch (msg->what()) { 107 case kWhatDrainAudioQueue: 108 { 109 int32_t generation; 110 CHECK(msg->findInt32("generation", &generation)); 111 if (generation != mAudioQueueGeneration) { 112 break; 113 } 114 115 mDrainAudioQueuePending = false; 116 117 onDrainAudioQueue(); 118 119 postDrainAudioQueue(); 120 break; 121 } 122 123 case kWhatDrainVideoQueue: 124 { 125 int32_t generation; 126 CHECK(msg->findInt32("generation", &generation)); 127 if (generation != mVideoQueueGeneration) { 128 break; 129 } 130 131 mDrainVideoQueuePending = false; 132 133 onDrainVideoQueue(); 134 135 postDrainVideoQueue(); 136 break; 137 } 138 139 case kWhatQueueBuffer: 140 { 141 onQueueBuffer(msg); 142 break; 143 } 144 145 case kWhatQueueEOS: 146 { 147 onQueueEOS(msg); 148 break; 149 } 150 151 case kWhatFlush: 152 { 153 onFlush(msg); 154 break; 155 } 156 157 case kWhatAudioSinkChanged: 158 { 159 onAudioSinkChanged(); 160 break; 161 } 162 163 case kWhatPause: 164 { 165 
onPause(); 166 break; 167 } 168 169 case kWhatResume: 170 { 171 onResume(); 172 break; 173 } 174 175 default: 176 TRESPASS(); 177 break; 178 } 179} 180 181void NuPlayer::Renderer::postDrainAudioQueue() { 182 if (mDrainAudioQueuePending || mSyncQueues || mPaused) { 183 return; 184 } 185 186 if (mAudioQueue.empty()) { 187 return; 188 } 189 190 mDrainAudioQueuePending = true; 191 sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, id()); 192 msg->setInt32("generation", mAudioQueueGeneration); 193 msg->post(10000); 194} 195 196void NuPlayer::Renderer::signalAudioSinkChanged() { 197 (new AMessage(kWhatAudioSinkChanged, id()))->post(); 198} 199 200void NuPlayer::Renderer::onDrainAudioQueue() { 201 uint32_t numFramesPlayed; 202 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK); 203 204 ssize_t numFramesAvailableToWrite = 205 mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed); 206 207 CHECK_GE(numFramesAvailableToWrite, 0); 208 209 size_t numBytesAvailableToWrite = 210 numFramesAvailableToWrite * mAudioSink->frameSize(); 211 212 while (numBytesAvailableToWrite > 0) { 213 if (mAudioQueue.empty()) { 214 break; 215 } 216 217 QueueEntry *entry = &*mAudioQueue.begin(); 218 219 if (entry->mBuffer == NULL) { 220 // EOS 221 222 notifyEOS(true /* audio */); 223 224 mAudioQueue.erase(mAudioQueue.begin()); 225 entry = NULL; 226 return; 227 } 228 229 if (entry->mOffset == 0) { 230 int64_t mediaTimeUs; 231 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 232 233 LOGV("rendering audio at media time %.2f secs", mediaTimeUs / 1E6); 234 235 mAnchorTimeMediaUs = mediaTimeUs; 236 237 uint32_t numFramesPlayed; 238 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed), (status_t)OK); 239 240 uint32_t numFramesPendingPlayout = 241 mNumFramesWritten - numFramesPlayed; 242 243 int64_t realTimeOffsetUs = 244 (mAudioSink->latency() / 2 /* XXX */ 245 + numFramesPendingPlayout 246 * mAudioSink->msecsPerFrame()) * 1000ll; 247 248 // LOGI("realTimeOffsetUs 
= %lld us", realTimeOffsetUs); 249 250 mAnchorTimeRealUs = 251 ALooper::GetNowUs() + realTimeOffsetUs; 252 } 253 254 size_t copy = entry->mBuffer->size() - entry->mOffset; 255 if (copy > numBytesAvailableToWrite) { 256 copy = numBytesAvailableToWrite; 257 } 258 259 CHECK_EQ(mAudioSink->write( 260 entry->mBuffer->data() + entry->mOffset, copy), 261 (ssize_t)copy); 262 263 entry->mOffset += copy; 264 if (entry->mOffset == entry->mBuffer->size()) { 265 entry->mNotifyConsumed->post(); 266 mAudioQueue.erase(mAudioQueue.begin()); 267 entry = NULL; 268 } 269 270 numBytesAvailableToWrite -= copy; 271 mNumFramesWritten += copy / mAudioSink->frameSize(); 272 } 273 274 notifyPosition(); 275} 276 277void NuPlayer::Renderer::postDrainVideoQueue() { 278 if (mDrainVideoQueuePending || mSyncQueues || mPaused) { 279 return; 280 } 281 282 if (mVideoQueue.empty()) { 283 return; 284 } 285 286 QueueEntry &entry = *mVideoQueue.begin(); 287 288 sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, id()); 289 msg->setInt32("generation", mVideoQueueGeneration); 290 291 int64_t delayUs; 292 293 if (entry.mBuffer == NULL) { 294 // EOS doesn't carry a timestamp. 
295 delayUs = 0; 296 } else { 297 int64_t mediaTimeUs; 298 CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 299 300 if (mAnchorTimeMediaUs < 0) { 301 delayUs = 0; 302 303 if (!mHasAudio) { 304 mAnchorTimeMediaUs = mediaTimeUs; 305 mAnchorTimeRealUs = ALooper::GetNowUs(); 306 } 307 } else { 308 int64_t realTimeUs = 309 (mediaTimeUs - mAnchorTimeMediaUs) + mAnchorTimeRealUs; 310 311 delayUs = realTimeUs - ALooper::GetNowUs(); 312 } 313 } 314 315 msg->post(delayUs); 316 317 mDrainVideoQueuePending = true; 318} 319 320void NuPlayer::Renderer::onDrainVideoQueue() { 321 if (mVideoQueue.empty()) { 322 return; 323 } 324 325 QueueEntry *entry = &*mVideoQueue.begin(); 326 327 if (entry->mBuffer == NULL) { 328 // EOS 329 330 notifyEOS(false /* audio */); 331 332 mVideoQueue.erase(mVideoQueue.begin()); 333 entry = NULL; 334 return; 335 } 336 337#if 0 338 int64_t mediaTimeUs; 339 CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs)); 340 341 LOGI("rendering video at media time %.2f secs", mediaTimeUs / 1E6); 342#endif 343 344 entry->mNotifyConsumed->setInt32("render", true); 345 entry->mNotifyConsumed->post(); 346 mVideoQueue.erase(mVideoQueue.begin()); 347 entry = NULL; 348 349 notifyPosition(); 350} 351 352void NuPlayer::Renderer::notifyEOS(bool audio) { 353 sp<AMessage> notify = mNotify->dup(); 354 notify->setInt32("what", kWhatEOS); 355 notify->setInt32("audio", static_cast<int32_t>(audio)); 356 notify->post(); 357} 358 359void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) { 360 int32_t audio; 361 CHECK(msg->findInt32("audio", &audio)); 362 363 if (dropBufferWhileFlushing(audio, msg)) { 364 return; 365 } 366 367 sp<RefBase> obj; 368 CHECK(msg->findObject("buffer", &obj)); 369 sp<ABuffer> buffer = static_cast<ABuffer *>(obj.get()); 370 371 sp<AMessage> notifyConsumed; 372 CHECK(msg->findMessage("notifyConsumed", ¬ifyConsumed)); 373 374 QueueEntry entry; 375 entry.mBuffer = buffer; 376 entry.mNotifyConsumed = notifyConsumed; 377 
entry.mOffset = 0; 378 entry.mFinalResult = OK; 379 380 if (audio) { 381 mAudioQueue.push_back(entry); 382 postDrainAudioQueue(); 383 } else { 384 mVideoQueue.push_back(entry); 385 postDrainVideoQueue(); 386 } 387 388 if (mSyncQueues && !mAudioQueue.empty() && !mVideoQueue.empty()) { 389 int64_t firstAudioTimeUs; 390 int64_t firstVideoTimeUs; 391 CHECK((*mAudioQueue.begin()).mBuffer->meta() 392 ->findInt64("timeUs", &firstAudioTimeUs)); 393 CHECK((*mVideoQueue.begin()).mBuffer->meta() 394 ->findInt64("timeUs", &firstVideoTimeUs)); 395 396 int64_t diff = firstVideoTimeUs - firstAudioTimeUs; 397 398 LOGV("queueDiff = %.2f secs", diff / 1E6); 399 400 if (diff > 100000ll) { 401 // Audio data starts More than 0.1 secs before video. 402 // Drop some audio. 403 404 (*mAudioQueue.begin()).mNotifyConsumed->post(); 405 mAudioQueue.erase(mAudioQueue.begin()); 406 return; 407 } 408 409 syncQueuesDone(); 410 } 411} 412 413void NuPlayer::Renderer::syncQueuesDone() { 414 if (!mSyncQueues) { 415 return; 416 } 417 418 mSyncQueues = false; 419 420 if (!mAudioQueue.empty()) { 421 postDrainAudioQueue(); 422 } 423 424 if (!mVideoQueue.empty()) { 425 postDrainVideoQueue(); 426 } 427} 428 429void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) { 430 int32_t audio; 431 CHECK(msg->findInt32("audio", &audio)); 432 433 if (dropBufferWhileFlushing(audio, msg)) { 434 return; 435 } 436 437 int32_t finalResult; 438 CHECK(msg->findInt32("finalResult", &finalResult)); 439 440 QueueEntry entry; 441 entry.mOffset = 0; 442 entry.mFinalResult = finalResult; 443 444 if (audio) { 445 mAudioQueue.push_back(entry); 446 postDrainAudioQueue(); 447 } else { 448 mVideoQueue.push_back(entry); 449 postDrainVideoQueue(); 450 } 451} 452 453void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) { 454 int32_t audio; 455 CHECK(msg->findInt32("audio", &audio)); 456 457 // If we're currently syncing the queues, i.e. 
dropping audio while 458 // aligning the first audio/video buffer times and only one of the 459 // two queues has data, we may starve that queue by not requesting 460 // more buffers from the decoder. If the other source then encounters 461 // a discontinuity that leads to flushing, we'll never find the 462 // corresponding discontinuity on the other queue. 463 // Therefore we'll stop syncing the queues if at least one of them 464 // is flushed. 465 syncQueuesDone(); 466 467 if (audio) { 468 flushQueue(&mAudioQueue); 469 470 Mutex::Autolock autoLock(mFlushLock); 471 mFlushingAudio = false; 472 473 mDrainAudioQueuePending = false; 474 ++mAudioQueueGeneration; 475 } else { 476 flushQueue(&mVideoQueue); 477 478 Mutex::Autolock autoLock(mFlushLock); 479 mFlushingVideo = false; 480 481 mDrainVideoQueuePending = false; 482 ++mVideoQueueGeneration; 483 } 484 485 notifyFlushComplete(audio); 486} 487 488void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) { 489 while (!queue->empty()) { 490 QueueEntry *entry = &*queue->begin(); 491 492 if (entry->mBuffer != NULL) { 493 entry->mNotifyConsumed->post(); 494 } 495 496 queue->erase(queue->begin()); 497 entry = NULL; 498 } 499} 500 501void NuPlayer::Renderer::notifyFlushComplete(bool audio) { 502 sp<AMessage> notify = mNotify->dup(); 503 notify->setInt32("what", kWhatFlushComplete); 504 notify->setInt32("audio", static_cast<int32_t>(audio)); 505 notify->post(); 506} 507 508bool NuPlayer::Renderer::dropBufferWhileFlushing( 509 bool audio, const sp<AMessage> &msg) { 510 bool flushing = false; 511 512 { 513 Mutex::Autolock autoLock(mFlushLock); 514 if (audio) { 515 flushing = mFlushingAudio; 516 } else { 517 flushing = mFlushingVideo; 518 } 519 } 520 521 if (!flushing) { 522 return false; 523 } 524 525 sp<AMessage> notifyConsumed; 526 if (msg->findMessage("notifyConsumed", ¬ifyConsumed)) { 527 notifyConsumed->post(); 528 } 529 530 return true; 531} 532 533void NuPlayer::Renderer::onAudioSinkChanged() { 534 
CHECK(!mDrainAudioQueuePending); 535 mNumFramesWritten = 0; 536} 537 538void NuPlayer::Renderer::notifyPosition() { 539 if (mAnchorTimeRealUs < 0 || mAnchorTimeMediaUs < 0) { 540 return; 541 } 542 543 int64_t nowUs = ALooper::GetNowUs(); 544 int64_t positionUs = (nowUs - mAnchorTimeRealUs) + mAnchorTimeMediaUs; 545 546 sp<AMessage> notify = mNotify->dup(); 547 notify->setInt32("what", kWhatPosition); 548 notify->setInt64("positionUs", positionUs); 549 notify->post(); 550} 551 552void NuPlayer::Renderer::onPause() { 553 CHECK(!mPaused); 554 555 mDrainAudioQueuePending = false; 556 ++mAudioQueueGeneration; 557 558 mDrainVideoQueuePending = false; 559 ++mVideoQueueGeneration; 560 561 if (mHasAudio) { 562 mAudioSink->pause(); 563 } 564 565 mPaused = true; 566} 567 568void NuPlayer::Renderer::onResume() { 569 CHECK(mPaused); 570 571 if (mHasAudio) { 572 mAudioSink->start(); 573 } 574 575 mPaused = false; 576 577 if (!mAudioQueue.empty()) { 578 postDrainAudioQueue(); 579 } 580 581 if (!mVideoQueue.empty()) { 582 postDrainVideoQueue(); 583 } 584} 585 586} // namespace android 587 588