// FastMixer.cpp revision 68112fc4f77ab8c4a744782f78e9792afe0cbfc1
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// <IMPORTANT_WARNING>
// Design rules for threadLoop() are given in the comments at section "Fast mixer thread" of
// StateQueue.h.  In particular, avoid library and system calls except at well-known points.
// The design rules are only for threadLoop(), and don't apply to FastMixerDumpState methods.
// </IMPORTANT_WARNING>

#define LOG_TAG "FastMixer"
//#define LOG_NDEBUG 0

#define ATRACE_TAG ATRACE_TAG_AUDIO

#include "Configuration.h"
#include <sys/atomics.h>
#include <time.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <system/audio.h>
#ifdef FAST_MIXER_STATISTICS
#include <cpustats/CentralTendencyStatistics.h>
#ifdef CPU_FREQUENCY_STATISTICS
#include <cpustats/ThreadCpuUsage.h>
#endif
#endif
#include "AudioMixer.h"
#include "FastMixer.h"

#define FCC_2 2     // fixed channel count assumption

namespace android {

// Shared initial state; both 'previous' and 'current' point here until the
// first real state is pushed through the state queue.
/*static*/ const FastMixerState FastMixer::initial;

FastMixer::FastMixer() : FastThread(),
    slopNs(0),
    // fastTrackNames
    // generations
    outputSink(NULL),
    outputSinkGen(0),
    mixer(NULL),
    mixBuffer(NULL),
    mixBufferState(UNDEFINED),
    format(Format_Invalid),
    sampleRate(0),
    fastTracksGen(0),
    totalNativeFramesWritten(0),
    // timestamp
    nativeFramesWrittenButNotPresented(0)   // the = 0 is to silence the compiler
{
    // FIXME pass initial as parameter to base class constructor, and make it static local
    previous = &initial;
    current = &initial;

    mDummyDumpState = &dummyDumpState;

    // no fast tracks yet: no AudioMixer track names allocated (-1), generation counters at zero
    unsigned i;
    for (i = 0; i < FastMixerState::kMaxFastTracks; ++i) {
        fastTrackNames[i] = -1;
        generations[i] = 0;
    }
#ifdef FAST_MIXER_STATISTICS
    oldLoad.tv_sec = 0;
    oldLoad.tv_nsec = 0;
#endif
}

FastMixer::~FastMixer()
{
}

// Returns the state queue used by the other (normal mixer) side to push
// state changes to this fast thread.
FastMixerStateQueue* FastMixer::sq()
{
    return &mSQ;
}

// FastThread hook: fetch the next pending state from the state queue, if any.
const FastThreadState *FastMixer::poll()
{
    return mSQ.poll();
}

// Attach an NBLog writer to the AudioMixer, if a mixer currently exists.
void FastMixer::setLog(NBLog::Writer *logWriter)
{
    if (mixer != NULL) {
        mixer->setLog(logWriter);
    }
}

// FastThread hook: entering idle; copy the current state into a local so it
// remains valid while this thread idles.
void FastMixer::onIdle()
{
    preIdle = *(const FastMixerState *)current;
    current = &preIdle;
}

// FastThread hook: thread is exiting; release the mixer and mix buffer.
void FastMixer::onExit()
{
    delete mixer;
    delete[] mixBuffer;
}

// FastThread hook: true for the commands that this subclass handles itself
// (MIX, WRITE, MIX_WRITE), false for base-class commands.
bool FastMixer::isSubClassCommand(FastThreadState::Command command)
{
    switch ((FastMixerState::Command) command) {
    case FastMixerState::MIX:
    case FastMixerState::WRITE:
    case FastMixerState::MIX_WRITE:
        return true;
    default:
        return false;
    }
}

// FastThread hook: a new state has been pushed; diff it against the previous
// state to reconfigure the output sink, re-create the mixer and mix buffer if
// the HAL format or frame count changed, and apply fast-track add/remove/modify.
void FastMixer::onStateChange()
{
    const FastMixerState * const current = (const FastMixerState *) this->current;
    const FastMixerState * const previous = (const FastMixerState *) this->previous;
    FastMixerDumpState * const dumpState = (FastMixerDumpState *) this->dumpState;
    const size_t frameCount = current->mFrameCount;

    // handle state change here, but since we want to diff the state,
    // we're prepared for previous == &initial the first time through
    unsigned previousTrackMask;

    // check for change in output HAL configuration
    NBAIO_Format previousFormat = format;
    if (current->mOutputSinkGen != outputSinkGen) {
        outputSink = current->mOutputSink;
        outputSinkGen = current->mOutputSinkGen;
        if (outputSink == NULL) {
            format = Format_Invalid;
            sampleRate = 0;
        } else {
            format = outputSink->format();
            sampleRate = Format_sampleRate(format);
            ALOG_ASSERT(Format_channelCount(format) == FCC_2);
        }
        dumpState->mSampleRate = sampleRate;
    }

    if ((!Format_isEqual(format, previousFormat)) || (frameCount != previous->mFrameCount)) {
        // FIXME to avoid priority inversion, don't delete here
        delete mixer;
        mixer = NULL;
        delete[] mixBuffer;
        mixBuffer = NULL;
        if (frameCount > 0 && sampleRate > 0) {
            // FIXME new may block for unbounded time at internal mutex of the heap
            //       implementation; it would be better to have normal mixer allocate for us
            //       to avoid blocking here and to prevent possible priority inversion
            mixer = new AudioMixer(frameCount, sampleRate, FastMixerState::kMaxFastTracks);
            mixBuffer = new short[frameCount * FCC_2];
            // timing thresholds expressed as fractions of the nominal mix period
            periodNs = (frameCount * 1000000000LL) / sampleRate;    // 1.00
            underrunNs = (frameCount * 1750000000LL) / sampleRate;  // 1.75
            overrunNs = (frameCount * 500000000LL) / sampleRate;    // 0.50
            forceNs = (frameCount * 950000000LL) / sampleRate;      // 0.95
            warmupNs = (frameCount * 500000000LL) / sampleRate;     // 0.50
        } else {
            periodNs = 0;
            underrunNs = 0;
            overrunNs = 0;
            forceNs = 0;
            warmupNs = 0;
        }
        mixBufferState = UNDEFINED;
#if !LOG_NDEBUG
        for (unsigned i = 0; i < FastMixerState::kMaxFastTracks; ++i) {
            fastTrackNames[i] = -1;
        }
#endif
        // we need to reconfigure all active tracks
        previousTrackMask = 0;
        fastTracksGen = current->mFastTracksGen - 1;
        dumpState->mFrameCount = frameCount;
    } else {
        previousTrackMask = previous->mTrackMask;
    }

    // check for change in active track set
    const unsigned currentTrackMask = current->mTrackMask;
    dumpState->mTrackMask = currentTrackMask;
    if (current->mFastTracksGen != fastTracksGen) {
        ALOG_ASSERT(mixBuffer != NULL);
        int name;

        // process removed tracks first to avoid running out of track names
        unsigned removedTracks = previousTrackMask & ~currentTrackMask;
        while (removedTracks != 0) {
            int i = __builtin_ctz(removedTracks);
            removedTracks &= ~(1 << i);
            const FastTrack* fastTrack = &current->mFastTracks[i];
            ALOG_ASSERT(fastTrack->mBufferProvider == NULL);
            if (mixer != NULL) {
                name = fastTrackNames[i];
                ALOG_ASSERT(name >= 0);
                mixer->deleteTrackName(name);
            }
#if !LOG_NDEBUG
            fastTrackNames[i] = -1;
#endif
            // don't reset track dump state, since other side is ignoring it
            generations[i] = fastTrack->mGeneration;
        }

        // now process added tracks
        unsigned addedTracks = currentTrackMask & ~previousTrackMask;
        while (addedTracks != 0) {
            int i = __builtin_ctz(addedTracks);
            addedTracks &= ~(1 << i);
            const FastTrack* fastTrack = &current->mFastTracks[i];
            AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider;
            ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1);
            if (mixer != NULL) {
                name = mixer->getTrackName(fastTrack->mChannelMask, AUDIO_SESSION_OUTPUT_MIX);
                ALOG_ASSERT(name >= 0);
                fastTrackNames[i] = name;
                mixer->setBufferProvider(name, bufferProvider);
                mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER,
                        (void *) mixBuffer);
                // newly allocated track names default to full scale volume
                mixer->enable(name);
            }
            generations[i] = fastTrack->mGeneration;
        }

        // finally process (potentially) modified tracks; these use the same slot
        // but may have a different buffer provider or volume provider
        unsigned modifiedTracks = currentTrackMask & previousTrackMask;
        while (modifiedTracks != 0) {
            int i = __builtin_ctz(modifiedTracks);
            modifiedTracks &= ~(1 << i);
            const FastTrack* fastTrack = &current->mFastTracks[i];
            if (fastTrack->mGeneration != generations[i]) {
                // this track was actually modified
                AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider;
                ALOG_ASSERT(bufferProvider != NULL);
                if (mixer != NULL) {
                    name = fastTrackNames[i];
                    ALOG_ASSERT(name >= 0);
                    mixer->setBufferProvider(name, bufferProvider);
                    if (fastTrack->mVolumeProvider == NULL) {
                        // no volume provider: reset to unity gain (0x1000 == 1.0 in U4.12)
                        mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0,
                                (void *)0x1000);
                        mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME1,
                                (void *)0x1000);
                    }
                    mixer->setParameter(name, AudioMixer::RESAMPLE,
                            AudioMixer::REMOVE, NULL);
                    mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK,
                            (void *)(uintptr_t) fastTrack->mChannelMask);
                    // already enabled
                }
                generations[i] = fastTrack->mGeneration;
            }
        }

        fastTracksGen = current->mFastTracksGen;

        dumpState->mNumTracks = popcount(currentTrackMask);
    }
}

// FastThread hook: perform one work cycle -- mix the active fast tracks into
// the mix buffer (MIX) and/or write the mix buffer to the output sink (WRITE),
// as selected by the current command.
void FastMixer::onWork()
{
    const FastMixerState * const current = (const FastMixerState *) this->current;
    FastMixerDumpState * const dumpState = (FastMixerDumpState *) this->dumpState;
    const FastMixerState::Command command = this->command;
    const size_t frameCount = current->mFrameCount;

    if ((command & FastMixerState::MIX) && (mixer != NULL) && isWarm) {
        ALOG_ASSERT(mixBuffer != NULL);
        // for each track, update volume and check for underrun
        unsigned currentTrackMask = current->mTrackMask;
        while (currentTrackMask != 0) {
            int i = __builtin_ctz(currentTrackMask);
            currentTrackMask &= ~(1 << i);
            const FastTrack* fastTrack = &current->mFastTracks[i];

            // Refresh the per-track timestamp
            if (timestampStatus == NO_ERROR) {
                uint32_t trackFramesWrittenButNotPresented =
                    nativeFramesWrittenButNotPresented;
                uint32_t trackFramesWritten = fastTrack->mBufferProvider->framesReleased();
                // Can't provide an AudioTimestamp before first frame presented,
                // or during the brief 32-bit wraparound window
                if (trackFramesWritten >= trackFramesWrittenButNotPresented) {
                    AudioTimestamp perTrackTimestamp;
                    perTrackTimestamp.mPosition =
                            trackFramesWritten - trackFramesWrittenButNotPresented;
                    perTrackTimestamp.mTime = timestamp.mTime;
                    fastTrack->mBufferProvider->onTimestamp(perTrackTimestamp);
                }
            }

            int name = fastTrackNames[i];
            ALOG_ASSERT(name >= 0);
            if (fastTrack->mVolumeProvider != NULL) {
                // packed stereo volume: low 16 bits = left, high 16 bits = right
                uint32_t vlr = fastTrack->mVolumeProvider->getVolumeLR();
                mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0,
                        (void *)(uintptr_t)(vlr & 0xFFFF));
                mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME1,
                        (void *)(uintptr_t)(vlr >> 16));
            }
            // FIXME The current implementation of framesReady() for fast tracks
            // takes a tryLock, which can block
            // up to 1 ms.  If enough active tracks all blocked in sequence, this would result
            // in the overall fast mix cycle being delayed.  Should use a non-blocking FIFO.
            size_t framesReady = fastTrack->mBufferProvider->framesReady();
            if (ATRACE_ENABLED()) {
                // I wish we had formatted trace names
                char traceName[16];
                strcpy(traceName, "fRdy");
                // encode track index as a single base-36 digit ('0'-'9', then 'A'-)
                traceName[4] = i + (i < 10 ? '0' : 'A' - 10);
                traceName[5] = '\0';
                ATRACE_INT(traceName, framesReady);
            }
            FastTrackDump *ftDump = &dumpState->mTracks[i];
            FastTrackUnderruns underruns = ftDump->mUnderruns;
            if (framesReady < frameCount) {
                if (framesReady == 0) {
                    underruns.mBitFields.mEmpty++;
                    underruns.mBitFields.mMostRecent = UNDERRUN_EMPTY;
                    mixer->disable(name);
                } else {
                    // allow mixing partial buffer
                    underruns.mBitFields.mPartial++;
                    underruns.mBitFields.mMostRecent = UNDERRUN_PARTIAL;
                    mixer->enable(name);
                }
            } else {
                underruns.mBitFields.mFull++;
                underruns.mBitFields.mMostRecent = UNDERRUN_FULL;
                mixer->enable(name);
            }
            ftDump->mUnderruns = underruns;
            ftDump->mFramesReady = framesReady;
        }

        int64_t pts;
        if (outputSink == NULL || (OK != outputSink->getNextWriteTimestamp(&pts))) {
            pts = AudioBufferProvider::kInvalidPTS;
        }

        // process() is CPU-bound
        mixer->process(pts);
        mixBufferState = MIXED;
    } else if (mixBufferState == MIXED) {
        mixBufferState = UNDEFINED;
    }
    //bool didFullWrite = false;    // dumpsys could display a count of partial writes
    if ((command & FastMixerState::WRITE) && (outputSink != NULL) && (mixBuffer != NULL)) {
        if (mixBufferState == UNDEFINED) {
            // nothing was mixed this cycle; write silence instead of stale data
            memset(mixBuffer, 0, frameCount * FCC_2 * sizeof(short));
            mixBufferState = ZEROED;
        }
        // if non-NULL, then duplicate write() to this non-blocking sink
        NBAIO_Sink* teeSink;
        if ((teeSink = current->mTeeSink) != NULL) {
            (void) teeSink->write(mixBuffer, frameCount);
        }
        // FIXME write() is non-blocking and lock-free for a properly implemented NBAIO sink,
        //       but this code should be modified to handle both non-blocking and blocking sinks
        dumpState->mWriteSequence++;
        ATRACE_BEGIN("write");
        ssize_t framesWritten = outputSink->write(mixBuffer, frameCount);
        ATRACE_END();
        dumpState->mWriteSequence++;
        if (framesWritten >= 0) {
            ALOG_ASSERT((size_t) framesWritten <= frameCount);
            totalNativeFramesWritten += framesWritten;
            dumpState->mFramesWritten = totalNativeFramesWritten;
            //if ((size_t) framesWritten == frameCount) {
            //    didFullWrite = true;
            //}
        } else {
            dumpState->mWriteErrors++;
        }
        attemptedWrite = true;
        // FIXME count # of writes blocked excessively, CPU usage, etc. for dump

        timestampStatus = outputSink->getTimestamp(timestamp);
        if (timestampStatus == NO_ERROR) {
            uint32_t totalNativeFramesPresented = timestamp.mPosition;
            if (totalNativeFramesPresented <= totalNativeFramesWritten) {
                nativeFramesWrittenButNotPresented =
                    totalNativeFramesWritten - totalNativeFramesPresented;
            } else {
                // HAL reported that more frames were presented than were written
                timestampStatus = INVALID_OPERATION;
            }
        }
    }
}

// Dump state starts zeroed; statistics sample arrays (when enabled) are sized
// by increaseSamplingN().
FastMixerDumpState::FastMixerDumpState(
#ifdef FAST_MIXER_STATISTICS
        uint32_t samplingN
#endif
        ) : FastThreadDumpState(),
    mWriteSequence(0), mFramesWritten(0),
    mNumTracks(0), mWriteErrors(0),
    mSampleRate(0), mFrameCount(0),
    mTrackMask(0)
{
#ifdef FAST_MIXER_STATISTICS
    increaseSamplingN(samplingN);
#endif
}

#ifdef FAST_MIXER_STATISTICS
// Grow the statistics sample arrays to 'samplingN' entries.  Shrinking is not
// supported, the cap is kSamplingN, and samplingN must satisfy
// roundup(samplingN) == samplingN (presumably a power-of-2 check -- see roundup()).
void FastMixerDumpState::increaseSamplingN(uint32_t samplingN)
{
    if (samplingN <= mSamplingN || samplingN > kSamplingN || roundup(samplingN) != samplingN) {
        return;
    }
    uint32_t additional = samplingN - mSamplingN;
    // sample arrays aren't accessed atomically with respect to the bounds,
    // so clearing reduces chance for dumpsys to read random uninitialized samples
    memset(&mMonotonicNs[mSamplingN], 0, sizeof(mMonotonicNs[0]) * additional);
    memset(&mLoadNs[mSamplingN], 0, sizeof(mLoadNs[0]) * additional);
#ifdef CPU_FREQUENCY_STATISTICS
    memset(&mCpukHz[mSamplingN], 0, sizeof(mCpukHz[0]) * additional);
#endif
    mSamplingN = samplingN;
}
#endif

FastMixerDumpState::~FastMixerDumpState()
{
}

// helper function called by qsort()
static int compare_uint32_t(const void *pa, const void *pb)
{
    uint32_t a = *(const uint32_t *)pa;
    uint32_t b = *(const uint32_t *)pb;
    if (a < b) {
        return -1;
    } else if (a > b) {
        return 1;
    } else {
        return 0;
    }
}

// Write a human-readable dump of mixer state and statistics to 'fd'.
// Per the IMPORTANT_WARNING at the top of this file, FastMixerDumpState methods
// are not subject to the threadLoop() design rules; the fast mixer may be
// concurrently updating this state, so values read here can be slightly stale
// or mutually inconsistent.
void FastMixerDumpState::dump(int fd) const
{
    if (mCommand == FastMixerState::INITIAL) {
        fdprintf(fd, "  FastMixer not initialized\n");
        return;
    }
#define COMMAND_MAX 32
    char string[COMMAND_MAX];
    switch (mCommand) {
    case FastMixerState::INITIAL:
        strcpy(string, "INITIAL");
        break;
    case FastMixerState::HOT_IDLE:
        strcpy(string, "HOT_IDLE");
        break;
    case FastMixerState::COLD_IDLE:
        strcpy(string, "COLD_IDLE");
        break;
    case FastMixerState::EXIT:
        strcpy(string, "EXIT");
        break;
    case FastMixerState::MIX:
        strcpy(string, "MIX");
        break;
    case FastMixerState::WRITE:
        strcpy(string, "WRITE");
        break;
    case FastMixerState::MIX_WRITE:
        strcpy(string, "MIX_WRITE");
        break;
    default:
        snprintf(string, COMMAND_MAX, "%d", mCommand);
        break;
    }
    double measuredWarmupMs = (mMeasuredWarmupTs.tv_sec * 1000.0) +
            (mMeasuredWarmupTs.tv_nsec / 1000000.0);
    double mixPeriodSec = (double) mFrameCount / (double) mSampleRate;
    fdprintf(fd, "  FastMixer command=%s writeSequence=%u framesWritten=%u\n"
                 "            numTracks=%u writeErrors=%u underruns=%u overruns=%u\n"
                 "            sampleRate=%u frameCount=%zu measuredWarmup=%.3g ms, warmupCycles=%u\n"
                 "            mixPeriod=%.2f ms\n",
            string, mWriteSequence, mFramesWritten,
            mNumTracks, mWriteErrors, mUnderruns, mOverruns,
            mSampleRate, mFrameCount, measuredWarmupMs, mWarmupCycles,
            mixPeriodSec * 1e3);
#ifdef FAST_MIXER_STATISTICS
    // find the interval of valid samples
    // (mBounds packs two 16-bit indices: low half = newest open, high half = oldest closed)
    uint32_t bounds = mBounds;
    uint32_t newestOpen = bounds & 0xFFFF;
    uint32_t oldestClosed = bounds >> 16;
    uint32_t n = (newestOpen - oldestClosed) & 0xFFFF;
    if (n > mSamplingN) {
        ALOGE("too many samples %u", n);
        n = mSamplingN;
    }
    // statistics for monotonic (wall clock) time, thread raw CPU load in time, CPU clock frequency,
    // and adjusted CPU load in MHz normalized for CPU clock frequency
    CentralTendencyStatistics wall, loadNs;
#ifdef CPU_FREQUENCY_STATISTICS
    CentralTendencyStatistics kHz, loadMHz;
    uint32_t previousCpukHz = 0;
#endif
    // Assuming a normal distribution for cycle times, three standard deviations on either side of
    // the mean account for 99.73% of the population.  So if we take each tail to be 1/1000 of the
    // sample set, we get 99.8% combined, or close to three standard deviations.
    static const uint32_t kTailDenominator = 1000;
    uint32_t *tail = n >= kTailDenominator ? new uint32_t[n] : NULL;
    // loop over all the samples
    for (uint32_t j = 0; j < n; ++j) {
        size_t i = oldestClosed++ & (mSamplingN - 1);
        uint32_t wallNs = mMonotonicNs[i];
        if (tail != NULL) {
            tail[j] = wallNs;
        }
        wall.sample(wallNs);
        uint32_t sampleLoadNs = mLoadNs[i];
        loadNs.sample(sampleLoadNs);
#ifdef CPU_FREQUENCY_STATISTICS
        uint32_t sampleCpukHz = mCpukHz[i];
        // skip bad kHz samples
        if ((sampleCpukHz & ~0xF) != 0) {
            kHz.sample(sampleCpukHz >> 4);
            if (sampleCpukHz == previousCpukHz) {
                double megacycles = (double) sampleLoadNs * (double) (sampleCpukHz >> 4) * 1e-12;
                double adjMHz = megacycles / mixPeriodSec;  // _not_ wallNs * 1e9
                loadMHz.sample(adjMHz);
            }
        }
        previousCpukHz = sampleCpukHz;
#endif
    }
    if (n) {
        fdprintf(fd, "  Simple moving statistics over last %.1f seconds:\n",
                wall.n() * mixPeriodSec);
        fdprintf(fd, "  wall clock time in ms per mix cycle:\n"
                     "    mean=%.2f min=%.2f max=%.2f stddev=%.2f\n",
                wall.mean()*1e-6, wall.minimum()*1e-6, wall.maximum()*1e-6,
                wall.stddev()*1e-6);
        fdprintf(fd, "  raw CPU load in us per mix cycle:\n"
                     "    mean=%.0f min=%.0f max=%.0f stddev=%.0f\n",
                loadNs.mean()*1e-3, loadNs.minimum()*1e-3, loadNs.maximum()*1e-3,
                loadNs.stddev()*1e-3);
    } else {
        fdprintf(fd, "  No FastMixer statistics available currently\n");
    }
#ifdef CPU_FREQUENCY_STATISTICS
    fdprintf(fd, "  CPU clock frequency in MHz:\n"
                 "    mean=%.0f min=%.0f max=%.0f stddev=%.0f\n",
            kHz.mean()*1e-3, kHz.minimum()*1e-3, kHz.maximum()*1e-3, kHz.stddev()*1e-3);
    fdprintf(fd, "  adjusted CPU load in MHz (i.e. normalized for CPU clock frequency):\n"
                 "    mean=%.1f min=%.1f max=%.1f stddev=%.1f\n",
            loadMHz.mean(), loadMHz.minimum(), loadMHz.maximum(), loadMHz.stddev());
#endif
    if (tail != NULL) {
        qsort(tail, n, sizeof(uint32_t), compare_uint32_t);
        // assume same number of tail samples on each side, left and right
        uint32_t count = n / kTailDenominator;
        CentralTendencyStatistics left, right;
        for (uint32_t i = 0; i < count; ++i) {
            left.sample(tail[i]);
            right.sample(tail[n - (i + 1)]);
        }
        fdprintf(fd, "  Distribution of mix cycle times in ms for the tails (> ~3 stddev outliers):\n"
                     "    left tail: mean=%.2f min=%.2f max=%.2f stddev=%.2f\n"
                     "    right tail: mean=%.2f min=%.2f max=%.2f stddev=%.2f\n",
                left.mean()*1e-6, left.minimum()*1e-6, left.maximum()*1e-6, left.stddev()*1e-6,
                right.mean()*1e-6, right.minimum()*1e-6, right.maximum()*1e-6,
                right.stddev()*1e-6);
        delete[] tail;
    }
#endif
    // The active track mask and track states are updated non-atomically.
    // So if we relied on isActive to decide whether to display,
    // then we might display an obsolete track or omit an active track.
    // Instead we always display all tracks, with an indication
    // of whether we think the track is active.
    uint32_t trackMask = mTrackMask;
    fdprintf(fd, "  Fast tracks: kMaxFastTracks=%u activeMask=%#x\n",
            FastMixerState::kMaxFastTracks, trackMask);
    fdprintf(fd, "  Index Active Full Partial Empty  Recent Ready\n");
    for (uint32_t i = 0; i < FastMixerState::kMaxFastTracks; ++i, trackMask >>= 1) {
        bool isActive = trackMask & 1;
        const FastTrackDump *ftDump = &mTracks[i];
        const FastTrackUnderruns& underruns = ftDump->mUnderruns;
        const char *mostRecent;
        switch (underruns.mBitFields.mMostRecent) {
        case UNDERRUN_FULL:
            mostRecent = "full";
            break;
        case UNDERRUN_PARTIAL:
            mostRecent = "partial";
            break;
        case UNDERRUN_EMPTY:
            mostRecent = "empty";
            break;
        default:
            mostRecent = "?";
            break;
        }
        fdprintf(fd, "  %5u %6s %4u %7u %5u %7s %5zu\n", i, isActive ? "yes" : "no",
                (underruns.mBitFields.mFull) & UNDERRUN_MASK,
                (underruns.mBitFields.mPartial) & UNDERRUN_MASK,
                (underruns.mBitFields.mEmpty) & UNDERRUN_MASK,
                mostRecent, ftDump->mFramesReady);
    }
}

}   // namespace android