FastMixer.cpp revision c62476f0c0c1cf9283a38852bde0a4c9434df712
1/* 2 * Copyright (C) 2012 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17// <IMPORTANT_WARNING> 18// Design rules for threadLoop() are given in the comments at section "Fast mixer thread" of 19// StateQueue.h. In particular, avoid library and system calls except at well-known points. 20// The design rules are only for threadLoop(), and don't apply to FastMixerDumpState methods. 21// </IMPORTANT_WARNING> 22 23#define LOG_TAG "FastMixer" 24//#define LOG_NDEBUG 0 25 26#define ATRACE_TAG ATRACE_TAG_AUDIO 27 28#include "Configuration.h" 29#include <time.h> 30#include <utils/Log.h> 31#include <utils/Trace.h> 32#include <system/audio.h> 33#ifdef FAST_MIXER_STATISTICS 34#include <cpustats/CentralTendencyStatistics.h> 35#ifdef CPU_FREQUENCY_STATISTICS 36#include <cpustats/ThreadCpuUsage.h> 37#endif 38#endif 39#include "AudioMixer.h" 40#include "FastMixer.h" 41 42#define FCC_2 2 // fixed channel count assumption 43 44namespace android { 45 46/*static*/ const FastMixerState FastMixer::initial; 47 48FastMixer::FastMixer() : FastThread(), 49 slopNs(0), 50 // fastTrackNames 51 // generations 52 outputSink(NULL), 53 outputSinkGen(0), 54 mixer(NULL), 55 mixBuffer(NULL), 56 mixBufferState(UNDEFINED), 57 format(Format_Invalid), 58 sampleRate(0), 59 fastTracksGen(0), 60 totalNativeFramesWritten(0), 61 // timestamp 62 nativeFramesWrittenButNotPresented(0) // the = 0 is to silence the compiler 63{ 64 // FIXME pass initial as 
parameter to base class constructor, and make it static local 65 previous = &initial; 66 current = &initial; 67 68 mDummyDumpState = &dummyDumpState; 69 70 unsigned i; 71 for (i = 0; i < FastMixerState::kMaxFastTracks; ++i) { 72 fastTrackNames[i] = -1; 73 generations[i] = 0; 74 } 75#ifdef FAST_MIXER_STATISTICS 76 oldLoad.tv_sec = 0; 77 oldLoad.tv_nsec = 0; 78#endif 79} 80 81FastMixer::~FastMixer() 82{ 83} 84 85FastMixerStateQueue* FastMixer::sq() 86{ 87 return &mSQ; 88} 89 90const FastThreadState *FastMixer::poll() 91{ 92 return mSQ.poll(); 93} 94 95void FastMixer::setLog(NBLog::Writer *logWriter) 96{ 97 if (mixer != NULL) { 98 mixer->setLog(logWriter); 99 } 100} 101 102void FastMixer::onIdle() 103{ 104 preIdle = *(const FastMixerState *)current; 105 current = &preIdle; 106} 107 108void FastMixer::onExit() 109{ 110 delete mixer; 111 delete[] mixBuffer; 112} 113 114bool FastMixer::isSubClassCommand(FastThreadState::Command command) 115{ 116 switch ((FastMixerState::Command) command) { 117 case FastMixerState::MIX: 118 case FastMixerState::WRITE: 119 case FastMixerState::MIX_WRITE: 120 return true; 121 default: 122 return false; 123 } 124} 125 126void FastMixer::onStateChange() 127{ 128 const FastMixerState * const current = (const FastMixerState *) this->current; 129 const FastMixerState * const previous = (const FastMixerState *) this->previous; 130 FastMixerDumpState * const dumpState = (FastMixerDumpState *) this->dumpState; 131 const size_t frameCount = current->mFrameCount; 132 133 // handle state change here, but since we want to diff the state, 134 // we're prepared for previous == &initial the first time through 135 unsigned previousTrackMask; 136 137 // check for change in output HAL configuration 138 NBAIO_Format previousFormat = format; 139 if (current->mOutputSinkGen != outputSinkGen) { 140 outputSink = current->mOutputSink; 141 outputSinkGen = current->mOutputSinkGen; 142 if (outputSink == NULL) { 143 format = Format_Invalid; 144 sampleRate = 0; 145 } 
else { 146 format = outputSink->format(); 147 sampleRate = Format_sampleRate(format); 148 ALOG_ASSERT(Format_channelCount(format) == FCC_2); 149 } 150 dumpState->mSampleRate = sampleRate; 151 } 152 153 if ((!Format_isEqual(format, previousFormat)) || (frameCount != previous->mFrameCount)) { 154 // FIXME to avoid priority inversion, don't delete here 155 delete mixer; 156 mixer = NULL; 157 delete[] mixBuffer; 158 mixBuffer = NULL; 159 if (frameCount > 0 && sampleRate > 0) { 160 // FIXME new may block for unbounded time at internal mutex of the heap 161 // implementation; it would be better to have normal mixer allocate for us 162 // to avoid blocking here and to prevent possible priority inversion 163 mixer = new AudioMixer(frameCount, sampleRate, FastMixerState::kMaxFastTracks); 164 mixBuffer = new short[frameCount * FCC_2]; 165 periodNs = (frameCount * 1000000000LL) / sampleRate; // 1.00 166 underrunNs = (frameCount * 1750000000LL) / sampleRate; // 1.75 167 overrunNs = (frameCount * 500000000LL) / sampleRate; // 0.50 168 forceNs = (frameCount * 950000000LL) / sampleRate; // 0.95 169 warmupNs = (frameCount * 500000000LL) / sampleRate; // 0.50 170 } else { 171 periodNs = 0; 172 underrunNs = 0; 173 overrunNs = 0; 174 forceNs = 0; 175 warmupNs = 0; 176 } 177 mixBufferState = UNDEFINED; 178#if !LOG_NDEBUG 179 for (unsigned i = 0; i < FastMixerState::kMaxFastTracks; ++i) { 180 fastTrackNames[i] = -1; 181 } 182#endif 183 // we need to reconfigure all active tracks 184 previousTrackMask = 0; 185 fastTracksGen = current->mFastTracksGen - 1; 186 dumpState->mFrameCount = frameCount; 187 } else { 188 previousTrackMask = previous->mTrackMask; 189 } 190 191 // check for change in active track set 192 const unsigned currentTrackMask = current->mTrackMask; 193 dumpState->mTrackMask = currentTrackMask; 194 if (current->mFastTracksGen != fastTracksGen) { 195 ALOG_ASSERT(mixBuffer != NULL); 196 int name; 197 198 // process removed tracks first to avoid running out of track names 199 
unsigned removedTracks = previousTrackMask & ~currentTrackMask; 200 while (removedTracks != 0) { 201 int i = __builtin_ctz(removedTracks); 202 removedTracks &= ~(1 << i); 203 const FastTrack* fastTrack = ¤t->mFastTracks[i]; 204 ALOG_ASSERT(fastTrack->mBufferProvider == NULL); 205 if (mixer != NULL) { 206 name = fastTrackNames[i]; 207 ALOG_ASSERT(name >= 0); 208 mixer->deleteTrackName(name); 209 } 210#if !LOG_NDEBUG 211 fastTrackNames[i] = -1; 212#endif 213 // don't reset track dump state, since other side is ignoring it 214 generations[i] = fastTrack->mGeneration; 215 } 216 217 // now process added tracks 218 unsigned addedTracks = currentTrackMask & ~previousTrackMask; 219 while (addedTracks != 0) { 220 int i = __builtin_ctz(addedTracks); 221 addedTracks &= ~(1 << i); 222 const FastTrack* fastTrack = ¤t->mFastTracks[i]; 223 AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; 224 ALOG_ASSERT(bufferProvider != NULL && fastTrackNames[i] == -1); 225 if (mixer != NULL) { 226 name = mixer->getTrackName(fastTrack->mChannelMask, 227 fastTrack->mFormat, AUDIO_SESSION_OUTPUT_MIX); 228 ALOG_ASSERT(name >= 0); 229 fastTrackNames[i] = name; 230 mixer->setBufferProvider(name, bufferProvider); 231 mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER, 232 (void *) mixBuffer); 233 // newly allocated track names default to full scale volume 234 mixer->enable(name); 235 } 236 generations[i] = fastTrack->mGeneration; 237 } 238 239 // finally process (potentially) modified tracks; these use the same slot 240 // but may have a different buffer provider or volume provider 241 unsigned modifiedTracks = currentTrackMask & previousTrackMask; 242 while (modifiedTracks != 0) { 243 int i = __builtin_ctz(modifiedTracks); 244 modifiedTracks &= ~(1 << i); 245 const FastTrack* fastTrack = ¤t->mFastTracks[i]; 246 if (fastTrack->mGeneration != generations[i]) { 247 // this track was actually modified 248 AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider; 
249 ALOG_ASSERT(bufferProvider != NULL); 250 if (mixer != NULL) { 251 name = fastTrackNames[i]; 252 ALOG_ASSERT(name >= 0); 253 mixer->setBufferProvider(name, bufferProvider); 254 if (fastTrack->mVolumeProvider == NULL) { 255 mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, 256 (void *) MAX_GAIN_INT); 257 mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME1, 258 (void *) MAX_GAIN_INT); 259 } 260 mixer->setParameter(name, AudioMixer::RESAMPLE, 261 AudioMixer::REMOVE, NULL); 262 mixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK, 263 (void *)(uintptr_t) fastTrack->mChannelMask); 264 // already enabled 265 } 266 generations[i] = fastTrack->mGeneration; 267 } 268 } 269 270 fastTracksGen = current->mFastTracksGen; 271 272 dumpState->mNumTracks = popcount(currentTrackMask); 273 } 274} 275 276void FastMixer::onWork() 277{ 278 const FastMixerState * const current = (const FastMixerState *) this->current; 279 FastMixerDumpState * const dumpState = (FastMixerDumpState *) this->dumpState; 280 const FastMixerState::Command command = this->command; 281 const size_t frameCount = current->mFrameCount; 282 283 if ((command & FastMixerState::MIX) && (mixer != NULL) && isWarm) { 284 ALOG_ASSERT(mixBuffer != NULL); 285 // for each track, update volume and check for underrun 286 unsigned currentTrackMask = current->mTrackMask; 287 while (currentTrackMask != 0) { 288 int i = __builtin_ctz(currentTrackMask); 289 currentTrackMask &= ~(1 << i); 290 const FastTrack* fastTrack = ¤t->mFastTracks[i]; 291 292 // Refresh the per-track timestamp 293 if (timestampStatus == NO_ERROR) { 294 uint32_t trackFramesWrittenButNotPresented = 295 nativeFramesWrittenButNotPresented; 296 uint32_t trackFramesWritten = fastTrack->mBufferProvider->framesReleased(); 297 // Can't provide an AudioTimestamp before first frame presented, 298 // or during the brief 32-bit wraparound window 299 if (trackFramesWritten >= trackFramesWrittenButNotPresented) { 300 
AudioTimestamp perTrackTimestamp; 301 perTrackTimestamp.mPosition = 302 trackFramesWritten - trackFramesWrittenButNotPresented; 303 perTrackTimestamp.mTime = timestamp.mTime; 304 fastTrack->mBufferProvider->onTimestamp(perTrackTimestamp); 305 } 306 } 307 308 int name = fastTrackNames[i]; 309 ALOG_ASSERT(name >= 0); 310 if (fastTrack->mVolumeProvider != NULL) { 311 gain_minifloat_packed_t vlr = fastTrack->mVolumeProvider->getVolumeLR(); 312 mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, 313 (void *) (uintptr_t) 314 (float_from_gain(gain_minifloat_unpack_left(vlr)) * MAX_GAIN_INT)); 315 mixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME1, 316 (void *) (uintptr_t) 317 (float_from_gain(gain_minifloat_unpack_right(vlr)) * MAX_GAIN_INT)); 318 } 319 // FIXME The current implementation of framesReady() for fast tracks 320 // takes a tryLock, which can block 321 // up to 1 ms. If enough active tracks all blocked in sequence, this would result 322 // in the overall fast mix cycle being delayed. Should use a non-blocking FIFO. 323 size_t framesReady = fastTrack->mBufferProvider->framesReady(); 324 if (ATRACE_ENABLED()) { 325 // I wish we had formatted trace names 326 char traceName[16]; 327 strcpy(traceName, "fRdy"); 328 traceName[4] = i + (i < 10 ? 
'0' : 'A' - 10); 329 traceName[5] = '\0'; 330 ATRACE_INT(traceName, framesReady); 331 } 332 FastTrackDump *ftDump = &dumpState->mTracks[i]; 333 FastTrackUnderruns underruns = ftDump->mUnderruns; 334 if (framesReady < frameCount) { 335 if (framesReady == 0) { 336 underruns.mBitFields.mEmpty++; 337 underruns.mBitFields.mMostRecent = UNDERRUN_EMPTY; 338 mixer->disable(name); 339 } else { 340 // allow mixing partial buffer 341 underruns.mBitFields.mPartial++; 342 underruns.mBitFields.mMostRecent = UNDERRUN_PARTIAL; 343 mixer->enable(name); 344 } 345 } else { 346 underruns.mBitFields.mFull++; 347 underruns.mBitFields.mMostRecent = UNDERRUN_FULL; 348 mixer->enable(name); 349 } 350 ftDump->mUnderruns = underruns; 351 ftDump->mFramesReady = framesReady; 352 } 353 354 int64_t pts; 355 if (outputSink == NULL || (OK != outputSink->getNextWriteTimestamp(&pts))) { 356 pts = AudioBufferProvider::kInvalidPTS; 357 } 358 359 // process() is CPU-bound 360 mixer->process(pts); 361 mixBufferState = MIXED; 362 } else if (mixBufferState == MIXED) { 363 mixBufferState = UNDEFINED; 364 } 365 //bool didFullWrite = false; // dumpsys could display a count of partial writes 366 if ((command & FastMixerState::WRITE) && (outputSink != NULL) && (mixBuffer != NULL)) { 367 if (mixBufferState == UNDEFINED) { 368 memset(mixBuffer, 0, frameCount * FCC_2 * sizeof(short)); 369 mixBufferState = ZEROED; 370 } 371 // if non-NULL, then duplicate write() to this non-blocking sink 372 NBAIO_Sink* teeSink; 373 if ((teeSink = current->mTeeSink) != NULL) { 374 (void) teeSink->write(mixBuffer, frameCount); 375 } 376 // FIXME write() is non-blocking and lock-free for a properly implemented NBAIO sink, 377 // but this code should be modified to handle both non-blocking and blocking sinks 378 dumpState->mWriteSequence++; 379 ATRACE_BEGIN("write"); 380 ssize_t framesWritten = outputSink->write(mixBuffer, frameCount); 381 ATRACE_END(); 382 dumpState->mWriteSequence++; 383 if (framesWritten >= 0) { 384 
ALOG_ASSERT((size_t) framesWritten <= frameCount); 385 totalNativeFramesWritten += framesWritten; 386 dumpState->mFramesWritten = totalNativeFramesWritten; 387 //if ((size_t) framesWritten == frameCount) { 388 // didFullWrite = true; 389 //} 390 } else { 391 dumpState->mWriteErrors++; 392 } 393 attemptedWrite = true; 394 // FIXME count # of writes blocked excessively, CPU usage, etc. for dump 395 396 timestampStatus = outputSink->getTimestamp(timestamp); 397 if (timestampStatus == NO_ERROR) { 398 uint32_t totalNativeFramesPresented = timestamp.mPosition; 399 if (totalNativeFramesPresented <= totalNativeFramesWritten) { 400 nativeFramesWrittenButNotPresented = 401 totalNativeFramesWritten - totalNativeFramesPresented; 402 } else { 403 // HAL reported that more frames were presented than were written 404 timestampStatus = INVALID_OPERATION; 405 } 406 } 407 } 408} 409 410FastMixerDumpState::FastMixerDumpState( 411#ifdef FAST_MIXER_STATISTICS 412 uint32_t samplingN 413#endif 414 ) : FastThreadDumpState(), 415 mWriteSequence(0), mFramesWritten(0), 416 mNumTracks(0), mWriteErrors(0), 417 mSampleRate(0), mFrameCount(0), 418 mTrackMask(0) 419{ 420#ifdef FAST_MIXER_STATISTICS 421 increaseSamplingN(samplingN); 422#endif 423} 424 425#ifdef FAST_MIXER_STATISTICS 426void FastMixerDumpState::increaseSamplingN(uint32_t samplingN) 427{ 428 if (samplingN <= mSamplingN || samplingN > kSamplingN || roundup(samplingN) != samplingN) { 429 return; 430 } 431 uint32_t additional = samplingN - mSamplingN; 432 // sample arrays aren't accessed atomically with respect to the bounds, 433 // so clearing reduces chance for dumpsys to read random uninitialized samples 434 memset(&mMonotonicNs[mSamplingN], 0, sizeof(mMonotonicNs[0]) * additional); 435 memset(&mLoadNs[mSamplingN], 0, sizeof(mLoadNs[0]) * additional); 436#ifdef CPU_FREQUENCY_STATISTICS 437 memset(&mCpukHz[mSamplingN], 0, sizeof(mCpukHz[0]) * additional); 438#endif 439 mSamplingN = samplingN; 440} 441#endif 442 
FastMixerDumpState::~FastMixerDumpState()
{
}

// helper function called by qsort()
static int compare_uint32_t(const void *pa, const void *pb)
{
    uint32_t a = *(const uint32_t *)pa;
    uint32_t b = *(const uint32_t *)pb;
    if (a < b) {
        return -1;
    } else if (a > b) {
        return 1;
    } else {
        return 0;
    }
}

// Write a human-readable snapshot of the fast mixer's state to fd (dumpsys).
// Not subject to the real-time design rules: this runs on the dumping thread,
// reading the dump state non-atomically, so values may be slightly stale or
// mutually inconsistent.
void FastMixerDumpState::dump(int fd) const
{
    if (mCommand == FastMixerState::INITIAL) {
        dprintf(fd, " FastMixer not initialized\n");
        return;
    }
#define COMMAND_MAX 32
    char string[COMMAND_MAX];
    // decode the last command into a printable name
    switch (mCommand) {
    case FastMixerState::INITIAL:
        strcpy(string, "INITIAL");
        break;
    case FastMixerState::HOT_IDLE:
        strcpy(string, "HOT_IDLE");
        break;
    case FastMixerState::COLD_IDLE:
        strcpy(string, "COLD_IDLE");
        break;
    case FastMixerState::EXIT:
        strcpy(string, "EXIT");
        break;
    case FastMixerState::MIX:
        strcpy(string, "MIX");
        break;
    case FastMixerState::WRITE:
        strcpy(string, "WRITE");
        break;
    case FastMixerState::MIX_WRITE:
        strcpy(string, "MIX_WRITE");
        break;
    default:
        snprintf(string, COMMAND_MAX, "%d", mCommand);
        break;
    }
    double measuredWarmupMs = (mMeasuredWarmupTs.tv_sec * 1000.0) +
            (mMeasuredWarmupTs.tv_nsec / 1000000.0);
    double mixPeriodSec = (double) mFrameCount / (double) mSampleRate;
    dprintf(fd, " FastMixer command=%s writeSequence=%u framesWritten=%u\n"
                " numTracks=%u writeErrors=%u underruns=%u overruns=%u\n"
                " sampleRate=%u frameCount=%zu measuredWarmup=%.3g ms, warmupCycles=%u\n"
                " mixPeriod=%.2f ms\n",
            string, mWriteSequence, mFramesWritten,
            mNumTracks, mWriteErrors, mUnderruns, mOverruns,
            mSampleRate, mFrameCount, measuredWarmupMs, mWarmupCycles,
            mixPeriodSec * 1e3);
#ifdef FAST_MIXER_STATISTICS
    // find the interval of valid samples
    // mBounds packs two 16-bit ring-buffer indices: low half is the newest
    // open slot, high half is the oldest closed slot.
    uint32_t bounds = mBounds;
    uint32_t newestOpen = bounds & 0xFFFF;
    uint32_t oldestClosed = bounds >> 16;
    uint32_t n = (newestOpen - oldestClosed) & 0xFFFF;
    if (n > mSamplingN) {
        ALOGE("too many samples %u", n);
        n = mSamplingN;
    }
    // statistics for monotonic (wall clock) time, thread raw CPU load in time, CPU clock frequency,
    // and adjusted CPU load in MHz normalized for CPU clock frequency
    CentralTendencyStatistics wall, loadNs;
#ifdef CPU_FREQUENCY_STATISTICS
    CentralTendencyStatistics kHz, loadMHz;
    uint32_t previousCpukHz = 0;
#endif
    // Assuming a normal distribution for cycle times, three standard deviations on either side of
    // the mean account for 99.73% of the population.  So if we take each tail to be 1/1000 of the
    // sample set, we get 99.8% combined, or close to three standard deviations.
    static const uint32_t kTailDenominator = 1000;
    uint32_t *tail = n >= kTailDenominator ? new uint32_t[n] : NULL;
    // loop over all the samples
    for (uint32_t j = 0; j < n; ++j) {
        // mSamplingN is a power of two, so this masks the index into the ring
        size_t i = oldestClosed++ & (mSamplingN - 1);
        uint32_t wallNs = mMonotonicNs[i];
        if (tail != NULL) {
            tail[j] = wallNs;
        }
        wall.sample(wallNs);
        uint32_t sampleLoadNs = mLoadNs[i];
        loadNs.sample(sampleLoadNs);
#ifdef CPU_FREQUENCY_STATISTICS
        uint32_t sampleCpukHz = mCpukHz[i];
        // skip bad kHz samples
        if ((sampleCpukHz & ~0xF) != 0) {
            kHz.sample(sampleCpukHz >> 4);
            // only trust load when frequency was stable across two samples
            if (sampleCpukHz == previousCpukHz) {
                double megacycles = (double) sampleLoadNs * (double) (sampleCpukHz >> 4) * 1e-12;
                double adjMHz = megacycles / mixPeriodSec;  // _not_ wallNs * 1e9
                loadMHz.sample(adjMHz);
            }
        }
        previousCpukHz = sampleCpukHz;
#endif
    }
    if (n) {
        dprintf(fd, " Simple moving statistics over last %.1f seconds:\n",
                wall.n() * mixPeriodSec);
        dprintf(fd, " wall clock time in ms per mix cycle:\n"
                    " mean=%.2f min=%.2f max=%.2f stddev=%.2f\n",
                wall.mean()*1e-6, wall.minimum()*1e-6, wall.maximum()*1e-6,
                wall.stddev()*1e-6);
        dprintf(fd, " raw CPU load in us per mix cycle:\n"
                    " mean=%.0f min=%.0f max=%.0f stddev=%.0f\n",
                loadNs.mean()*1e-3, loadNs.minimum()*1e-3, loadNs.maximum()*1e-3,
                loadNs.stddev()*1e-3);
    } else {
        dprintf(fd, " No FastMixer statistics available currently\n");
    }
#ifdef CPU_FREQUENCY_STATISTICS
    dprintf(fd, " CPU clock frequency in MHz:\n"
                " mean=%.0f min=%.0f max=%.0f stddev=%.0f\n",
            kHz.mean()*1e-3, kHz.minimum()*1e-3, kHz.maximum()*1e-3, kHz.stddev()*1e-3);
    dprintf(fd, " adjusted CPU load in MHz (i.e. normalized for CPU clock frequency):\n"
                " mean=%.1f min=%.1f max=%.1f stddev=%.1f\n",
            loadMHz.mean(), loadMHz.minimum(), loadMHz.maximum(), loadMHz.stddev());
#endif
    if (tail != NULL) {
        qsort(tail, n, sizeof(uint32_t), compare_uint32_t);
        // assume same number of tail samples on each side, left and right
        uint32_t count = n / kTailDenominator;
        CentralTendencyStatistics left, right;
        for (uint32_t i = 0; i < count; ++i) {
            left.sample(tail[i]);
            right.sample(tail[n - (i + 1)]);
        }
        dprintf(fd, " Distribution of mix cycle times in ms for the tails (> ~3 stddev outliers):\n"
                    " left tail: mean=%.2f min=%.2f max=%.2f stddev=%.2f\n"
                    " right tail: mean=%.2f min=%.2f max=%.2f stddev=%.2f\n",
                left.mean()*1e-6, left.minimum()*1e-6, left.maximum()*1e-6, left.stddev()*1e-6,
                right.mean()*1e-6, right.minimum()*1e-6, right.maximum()*1e-6,
                right.stddev()*1e-6);
        delete[] tail;
    }
#endif
    // The active track mask and track states are updated non-atomically.
    // So if we relied on isActive to decide whether to display,
    // then we might display an obsolete track or omit an active track.
    // Instead we always display all tracks, with an indication
    // of whether we think the track is active.
    uint32_t trackMask = mTrackMask;
    dprintf(fd, " Fast tracks: kMaxFastTracks=%u activeMask=%#x\n",
            FastMixerState::kMaxFastTracks, trackMask);
    dprintf(fd, " Index Active Full Partial Empty Recent Ready\n");
    for (uint32_t i = 0; i < FastMixerState::kMaxFastTracks; ++i, trackMask >>= 1) {
        bool isActive = trackMask & 1;
        const FastTrackDump *ftDump = &mTracks[i];
        const FastTrackUnderruns& underruns = ftDump->mUnderruns;
        const char *mostRecent;
        switch (underruns.mBitFields.mMostRecent) {
        case UNDERRUN_FULL:
            mostRecent = "full";
            break;
        case UNDERRUN_PARTIAL:
            mostRecent = "partial";
            break;
        case UNDERRUN_EMPTY:
            mostRecent = "empty";
            break;
        default:
            mostRecent = "?";
            break;
        }
        dprintf(fd, " %5u %6s %4u %7u %5u %7s %5zu\n", i, isActive ? "yes" : "no",
                (underruns.mBitFields.mFull) & UNDERRUN_MASK,
                (underruns.mBitFields.mPartial) & UNDERRUN_MASK,
                (underruns.mBitFields.mEmpty) & UNDERRUN_MASK,
                mostRecent, ftDump->mFramesReady);
    }
}

}   // namespace android