1// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/heap/gc-tracer.h"
6
7#include "src/counters.h"
8#include "src/heap/heap-inl.h"
9#include "src/isolate.h"
10
11namespace v8 {
12namespace internal {
13
14static intptr_t CountTotalHolesSize(Heap* heap) {
15  intptr_t holes_size = 0;
16  OldSpaces spaces(heap);
17  for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
18    holes_size += space->Waste() + space->Available();
19  }
20  return holes_size;
21}
22
23
// Starts timing a GC sub-phase. The elapsed time is charged to the given
// scope id in the tracer's current event when the Scope is destroyed.
GCTracer::Scope::Scope(GCTracer* tracer, ScopeId scope)
    : tracer_(tracer), scope_(scope) {
  start_time_ = tracer_->heap_->MonotonicallyIncreasingTimeInMs();
}
28
29
// Stops timing and accumulates the elapsed milliseconds into the current
// event's per-scope bucket.
GCTracer::Scope::~Scope() {
  DCHECK(scope_ < NUMBER_OF_SCOPES);  // scope_ is unsigned.
  tracer_->current_.scopes[scope_] +=
      tracer_->heap_->MonotonicallyIncreasingTimeInMs() - start_time_;
}
35
36
37GCTracer::AllocationEvent::AllocationEvent(double duration,
38                                           size_t allocation_in_bytes) {
39  duration_ = duration;
40  allocation_in_bytes_ = allocation_in_bytes;
41}
42
43
44GCTracer::ContextDisposalEvent::ContextDisposalEvent(double time) {
45  time_ = time;
46}
47
48
49GCTracer::SurvivalEvent::SurvivalEvent(double promotion_ratio) {
50  promotion_ratio_ = promotion_ratio;
51}
52
53
// Creates a GC event of the given type with all measurements zeroed.
// gc_reason / collector_reason may be NULL; they are only used for tracing
// output. The real start/end measurements are filled in by GCTracer::Start
// and GCTracer::Stop.
GCTracer::Event::Event(Type type, const char* gc_reason,
                       const char* collector_reason)
    : type(type),
      gc_reason(gc_reason),
      collector_reason(collector_reason),
      start_time(0.0),
      end_time(0.0),
      reduce_memory(false),
      start_object_size(0),
      end_object_size(0),
      start_memory_size(0),
      end_memory_size(0),
      start_holes_size(0),
      end_holes_size(0),
      cumulative_incremental_marking_steps(0),
      incremental_marking_steps(0),
      cumulative_incremental_marking_bytes(0),
      incremental_marking_bytes(0),
      cumulative_incremental_marking_duration(0.0),
      incremental_marking_duration(0.0),
      cumulative_pure_incremental_marking_duration(0.0),
      pure_incremental_marking_duration(0.0),
      longest_incremental_marking_step(0.0) {
  // Per-scope timing buckets start out empty for each event.
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    scopes[i] = 0;
  }
}
81
82
83const char* GCTracer::Event::TypeName(bool short_name) const {
84  switch (type) {
85    case SCAVENGER:
86      if (short_name) {
87        return "s";
88      } else {
89        return "Scavenge";
90      }
91    case MARK_COMPACTOR:
92    case INCREMENTAL_MARK_COMPACTOR:
93      if (short_name) {
94        return "ms";
95      } else {
96        return "Mark-sweep";
97      }
98    case START:
99      if (short_name) {
100        return "st";
101      } else {
102        return "Start";
103      }
104  }
105  return "Unknown Event Type";
106}
107
108
// Initializes the tracer with all cumulative counters zeroed and a synthetic
// START event as the "previous" GC, so that the first real GC has sane
// previous_ values (in particular a valid previous end_time).
GCTracer::GCTracer(Heap* heap)
    : heap_(heap),
      cumulative_incremental_marking_steps_(0),
      cumulative_incremental_marking_bytes_(0),
      cumulative_incremental_marking_duration_(0.0),
      cumulative_pure_incremental_marking_duration_(0.0),
      longest_incremental_marking_step_(0.0),
      cumulative_incremental_marking_finalization_steps_(0),
      cumulative_incremental_marking_finalization_duration_(0.0),
      longest_incremental_marking_finalization_step_(0.0),
      cumulative_marking_duration_(0.0),
      cumulative_sweeping_duration_(0.0),
      allocation_time_ms_(0.0),
      new_space_allocation_counter_bytes_(0),
      old_generation_allocation_counter_bytes_(0),
      allocation_duration_since_gc_(0.0),
      new_space_allocation_in_bytes_since_gc_(0),
      old_generation_allocation_in_bytes_since_gc_(0),
      combined_mark_compact_speed_cache_(0.0),
      start_counter_(0) {
  current_ = Event(Event::START, NULL, NULL);
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  previous_ = previous_incremental_mark_compactor_event_ = current_;
}
133
134
// Begins tracing a GC cycle. Only the outermost Start/Stop pair does real
// work; reentrant invocations just bump start_counter_. Snapshots the heap's
// pre-GC sizes and the cumulative incremental-marking counters into a fresh
// current_ event.
void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
                     const char* collector_reason) {
  start_counter_++;
  if (start_counter_ != 1) return;  // Reentrant GC; outer call already traced.

  previous_ = current_;
  double start_time = heap_->MonotonicallyIncreasingTimeInMs();
  // Close out the mutator allocation interval that ends at this GC.
  SampleAllocation(start_time, heap_->NewSpaceAllocationCounter(),
                   heap_->OldGenerationAllocationCounter());
  // Remember the last incremental mark-compact event separately so Stop can
  // compute per-cycle deltas of the cumulative incremental counters.
  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR)
    previous_incremental_mark_compactor_event_ = current_;

  if (collector == SCAVENGER) {
    current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
  } else if (collector == MARK_COMPACTOR) {
    // A mark-compact that finishes previously started incremental marking is
    // classified as INCREMENTAL_MARK_COMPACTOR.
    if (heap_->incremental_marking()->WasActivated()) {
      current_ =
          Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason);
    } else {
      current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
    }
  }

  // Pre-GC heap measurements.
  current_.reduce_memory = heap_->ShouldReduceMemory();
  current_.start_time = start_time;
  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->isolate()->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  current_.new_space_object_size =
      heap_->new_space()->top() - heap_->new_space()->bottom();

  // Snapshot cumulative incremental-marking counters; Stop subtracts the
  // corresponding previous snapshot to get per-cycle values.
  current_.cumulative_incremental_marking_steps =
      cumulative_incremental_marking_steps_;
  current_.cumulative_incremental_marking_bytes =
      cumulative_incremental_marking_bytes_;
  current_.cumulative_incremental_marking_duration =
      cumulative_incremental_marking_duration_;
  current_.cumulative_pure_incremental_marking_duration =
      cumulative_pure_incremental_marking_duration_;
  current_.longest_incremental_marking_step = longest_incremental_marking_step_;

  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    current_.scopes[i] = 0;
  }
  // Report pre-GC memory to the aggregated histograms (in KB).
  int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB);
  int used_memory = static_cast<int>(current_.start_object_size / KB);
  heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample(
      start_time, committed_memory);
  heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample(
      start_time, used_memory);
}
186
187
// Finishes tracing a GC cycle: records post-GC measurements, computes
// per-cycle incremental-marking deltas, archives the event in the matching
// ring buffer, and emits trace output. Reentrant Stops only log a note.
void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  if (start_counter_ != 0) {
    Output("[Finished reentrant %s during %s.]\n",
           collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
           current_.TypeName(false));
    return;
  }

  DCHECK(start_counter_ >= 0);
  DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
         (collector == MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

  // Post-GC heap measurements.
  current_.end_time = heap_->MonotonicallyIncreasingTimeInMs();
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->isolate()->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
  current_.survived_new_space_object_size = heap_->SurvivedNewSpaceObjectSize();

  // Flush the accumulated mutator allocation data into the event buffers.
  AddAllocation(current_.end_time);

  // Report post-GC memory to the aggregated histograms (in KB).
  int committed_memory = static_cast<int>(heap_->CommittedMemory() / KB);
  int used_memory = static_cast<int>(current_.end_object_size / KB);
  heap_->isolate()->counters()->aggregated_memory_heap_committed()->AddSample(
      current_.end_time, committed_memory);
  heap_->isolate()->counters()->aggregated_memory_heap_used()->AddSample(
      current_.end_time, used_memory);

  if (current_.type == Event::SCAVENGER) {
    // For scavenges, the per-cycle incremental counters are the delta since
    // the previous GC of any kind.
    current_.incremental_marking_steps =
        current_.cumulative_incremental_marking_steps -
        previous_.cumulative_incremental_marking_steps;
    current_.incremental_marking_bytes =
        current_.cumulative_incremental_marking_bytes -
        previous_.cumulative_incremental_marking_bytes;
    current_.incremental_marking_duration =
        current_.cumulative_incremental_marking_duration -
        previous_.cumulative_incremental_marking_duration;
    current_.pure_incremental_marking_duration =
        current_.cumulative_pure_incremental_marking_duration -
        previous_.cumulative_pure_incremental_marking_duration;
    scavenger_events_.push_front(current_);
  } else if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    // For incremental mark-compacts, the delta is taken since the previous
    // incremental mark-compact, covering the whole marking round.
    current_.incremental_marking_steps =
        current_.cumulative_incremental_marking_steps -
        previous_incremental_mark_compactor_event_
            .cumulative_incremental_marking_steps;
    current_.incremental_marking_bytes =
        current_.cumulative_incremental_marking_bytes -
        previous_incremental_mark_compactor_event_
            .cumulative_incremental_marking_bytes;
    current_.incremental_marking_duration =
        current_.cumulative_incremental_marking_duration -
        previous_incremental_mark_compactor_event_
            .cumulative_incremental_marking_duration;
    current_.pure_incremental_marking_duration =
        current_.cumulative_pure_incremental_marking_duration -
        previous_incremental_mark_compactor_event_
            .cumulative_pure_incremental_marking_duration;
    longest_incremental_marking_step_ = 0.0;
    incremental_mark_compactor_events_.push_front(current_);
    combined_mark_compact_speed_cache_ = 0.0;  // Invalidate the speed cache.
  } else {
    // Non-incremental mark-compact: no incremental work should be recorded.
    DCHECK(current_.incremental_marking_bytes == 0);
    DCHECK(current_.incremental_marking_duration == 0);
    DCHECK(current_.pure_incremental_marking_duration == 0);
    longest_incremental_marking_step_ = 0.0;
    mark_compactor_events_.push_front(current_);
    combined_mark_compact_speed_cache_ = 0.0;  // Invalidate the speed cache.
  }

  // TODO(ernstm): move the code below out of GCTracer.

  double duration = current_.end_time - current_.start_time;
  // Clamp at 0: clocks and reentrant events can make this appear negative.
  double spent_in_mutator = Max(current_.start_time - previous_.end_time, 0.0);

  heap_->UpdateCumulativeGCStatistics(duration, spent_in_mutator,
                                      current_.scopes[Scope::MC_MARK]);

  if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger)
    return;

  if (FLAG_trace_gc_nvp)
    PrintNVP();
  else
    Print();

  if (FLAG_trace_gc) {
    heap_->PrintShortHeapStatistics();
  }

  // Finalization counters are reported per GC cycle; reset for the next one.
  longest_incremental_marking_finalization_step_ = 0.0;
  cumulative_incremental_marking_finalization_steps_ = 0;
  cumulative_incremental_marking_finalization_duration_ = 0.0;
}
285
286
// Folds a new allocation-counter sample into the running "since last GC"
// totals. current_ms is a monotonic timestamp; the counter arguments are the
// heap's monotonically increasing allocation counters.
void GCTracer::SampleAllocation(double current_ms,
                                size_t new_space_counter_bytes,
                                size_t old_generation_counter_bytes) {
  if (allocation_time_ms_ == 0) {
    // It is the first sample.
    allocation_time_ms_ = current_ms;
    new_space_allocation_counter_bytes_ = new_space_counter_bytes;
    old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
    return;
  }
  // This assumes that counters are unsigned integers so that the subtraction
  // below works even if the new counter is less then the old counter.
  size_t new_space_allocated_bytes =
      new_space_counter_bytes - new_space_allocation_counter_bytes_;
  size_t old_generation_allocated_bytes =
      old_generation_counter_bytes - old_generation_allocation_counter_bytes_;
  double duration = current_ms - allocation_time_ms_;
  allocation_time_ms_ = current_ms;
  new_space_allocation_counter_bytes_ = new_space_counter_bytes;
  old_generation_allocation_counter_bytes_ = old_generation_counter_bytes;
  allocation_duration_since_gc_ += duration;
  new_space_allocation_in_bytes_since_gc_ += new_space_allocated_bytes;
  old_generation_allocation_in_bytes_since_gc_ +=
      old_generation_allocated_bytes;
}
312
313
// Archives the "since last GC" allocation totals as one AllocationEvent per
// generation, then resets the running totals for the next interval.
void GCTracer::AddAllocation(double current_ms) {
  allocation_time_ms_ = current_ms;
  new_space_allocation_events_.push_front(AllocationEvent(
      allocation_duration_since_gc_, new_space_allocation_in_bytes_since_gc_));
  old_generation_allocation_events_.push_front(
      AllocationEvent(allocation_duration_since_gc_,
                      old_generation_allocation_in_bytes_since_gc_));
  allocation_duration_since_gc_ = 0;
  new_space_allocation_in_bytes_since_gc_ = 0;
  old_generation_allocation_in_bytes_since_gc_ = 0;
}
325
326
// Records the timestamp of a context disposal for rate computation.
void GCTracer::AddContextDisposalTime(double time) {
  context_disposal_events_.push_front(ContextDisposalEvent(time));
}
330
331
// Records one compaction: its duration (ms) and live bytes moved, used by
// CompactionSpeedInBytesPerMillisecond().
void GCTracer::AddCompactionEvent(double duration,
                                  intptr_t live_bytes_compacted) {
  compaction_events_.push_front(
      CompactionEvent(duration, live_bytes_compacted));
}
337
338
// Records a promotion ratio sample, used by AverageSurvivalRatio().
void GCTracer::AddSurvivalRatio(double promotion_ratio) {
  survival_events_.push_front(SurvivalEvent(promotion_ratio));
}
342
343
// Accumulates one incremental marking step into the cumulative counters.
// "Pure" incremental marking duration only counts steps that actually
// marked bytes (bytes > 0).
void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) {
  cumulative_incremental_marking_steps_++;
  cumulative_incremental_marking_bytes_ += bytes;
  cumulative_incremental_marking_duration_ += duration;
  longest_incremental_marking_step_ =
      Max(longest_incremental_marking_step_, duration);
  cumulative_marking_duration_ += duration;
  if (bytes > 0) {
    cumulative_pure_incremental_marking_duration_ += duration;
  }
}
355
356
// Accumulates one incremental-marking finalization step; these counters are
// reported by PrintNVP and reset at the end of GCTracer::Stop.
void GCTracer::AddIncrementalMarkingFinalizationStep(double duration) {
  cumulative_incremental_marking_finalization_steps_++;
  cumulative_incremental_marking_finalization_duration_ += duration;
  longest_incremental_marking_finalization_step_ =
      Max(longest_incremental_marking_finalization_step_, duration);
}
363
364
// printf-style trace output: writes to stdout when --trace-gc is on and
// always records the (possibly truncated to 256 chars) message in the heap's
// ring buffer for crash reports. The va_list is consumed twice, hence the
// two va_start/va_end pairs.
void GCTracer::Output(const char* format, ...) const {
  if (FLAG_trace_gc) {
    va_list arguments;
    va_start(arguments, format);
    base::OS::VPrint(format, arguments);
    va_end(arguments);
  }

  const int kBufferSize = 256;
  char raw_buffer[kBufferSize];
  Vector<char> buffer(raw_buffer, kBufferSize);
  va_list arguments2;
  va_start(arguments2, format);
  VSNPrintF(buffer, format, arguments2);
  va_end(arguments2);

  heap_->AddToRingBuffer(buffer.start());
}
383
384
// Emits the human-readable one-line GC trace for the current event, e.g.
// "123 ms: Scavenge 1.2 (3.4) -> 1.0 (3.4) MB, 0.5 / 0 ms [reason].".
void GCTracer::Print() const {
  if (FLAG_trace_gc) {
    // Prefix the stdout line with the isolate tag; the ring-buffer copy
    // written by Output() below does not carry it.
    PrintIsolate(heap_->isolate(), "");
  }
  Output("%8.0f ms: ", heap_->isolate()->time_millis_since_init());

  // Object size (committed memory) before -> after, in MB.
  Output("%s %.1f (%.1f) -> %.1f (%.1f) MB, ", current_.TypeName(false),
         static_cast<double>(current_.start_object_size) / MB,
         static_cast<double>(current_.start_memory_size) / MB,
         static_cast<double>(current_.end_object_size) / MB,
         static_cast<double>(current_.end_memory_size) / MB);

  // Total pause / time spent in external (embedder) callbacks.
  int external_time = static_cast<int>(current_.scopes[Scope::EXTERNAL]);
  double duration = current_.end_time - current_.start_time;
  Output("%.1f / %d ms", duration, external_time);

  if (current_.type == Event::SCAVENGER) {
    if (current_.incremental_marking_steps > 0) {
      Output(" (+ %.1f ms in %d steps since last GC)",
             current_.incremental_marking_duration,
             current_.incremental_marking_steps);
    }
  } else {
    if (current_.incremental_marking_steps > 0) {
      Output(
          " (+ %.1f ms in %d steps since start of marking, "
          "biggest step %.1f ms)",
          current_.incremental_marking_duration,
          current_.incremental_marking_steps,
          current_.longest_incremental_marking_step);
    }
  }

  if (current_.gc_reason != NULL) {
    Output(" [%s]", current_.gc_reason);
  }

  if (current_.collector_reason != NULL) {
    Output(" [%s]", current_.collector_reason);
  }

  Output(".\n");
}
428
429
430void GCTracer::PrintNVP() const {
431  double duration = current_.end_time - current_.start_time;
432  double spent_in_mutator = current_.start_time - previous_.end_time;
433  intptr_t allocated_since_last_gc =
434      current_.start_object_size - previous_.end_object_size;
435
436  switch (current_.type) {
437    case Event::SCAVENGER:
438      PrintIsolate(heap_->isolate(),
439                   "%8.0f ms: "
440                   "pause=%.1f "
441                   "mutator=%.1f "
442                   "gc=%s "
443                   "reduce_memory=%d "
444                   "scavenge=%.2f "
445                   "old_new=%.2f "
446                   "weak=%.2f "
447                   "roots=%.2f "
448                   "code=%.2f "
449                   "semispace=%.2f "
450                   "object_groups=%.2f "
451                   "steps_count=%d "
452                   "steps_took=%.1f "
453                   "scavenge_throughput=%" V8_PTR_PREFIX
454                   "d "
455                   "total_size_before=%" V8_PTR_PREFIX
456                   "d "
457                   "total_size_after=%" V8_PTR_PREFIX
458                   "d "
459                   "holes_size_before=%" V8_PTR_PREFIX
460                   "d "
461                   "holes_size_after=%" V8_PTR_PREFIX
462                   "d "
463                   "allocated=%" V8_PTR_PREFIX
464                   "d "
465                   "promoted=%" V8_PTR_PREFIX
466                   "d "
467                   "semi_space_copied=%" V8_PTR_PREFIX
468                   "d "
469                   "nodes_died_in_new=%d "
470                   "nodes_copied_in_new=%d "
471                   "nodes_promoted=%d "
472                   "promotion_ratio=%.1f%% "
473                   "average_survival_ratio=%.1f%% "
474                   "promotion_rate=%.1f%% "
475                   "semi_space_copy_rate=%.1f%% "
476                   "new_space_allocation_throughput=%" V8_PTR_PREFIX
477                   "d "
478                   "context_disposal_rate=%.1f\n",
479                   heap_->isolate()->time_millis_since_init(), duration,
480                   spent_in_mutator, current_.TypeName(true),
481                   current_.reduce_memory,
482                   current_.scopes[Scope::SCAVENGER_SCAVENGE],
483                   current_.scopes[Scope::SCAVENGER_OLD_TO_NEW_POINTERS],
484                   current_.scopes[Scope::SCAVENGER_WEAK],
485                   current_.scopes[Scope::SCAVENGER_ROOTS],
486                   current_.scopes[Scope::SCAVENGER_CODE_FLUSH_CANDIDATES],
487                   current_.scopes[Scope::SCAVENGER_SEMISPACE],
488                   current_.scopes[Scope::SCAVENGER_OBJECT_GROUPS],
489                   current_.incremental_marking_steps,
490                   current_.incremental_marking_duration,
491                   ScavengeSpeedInBytesPerMillisecond(),
492                   current_.start_object_size, current_.end_object_size,
493                   current_.start_holes_size, current_.end_holes_size,
494                   allocated_since_last_gc, heap_->promoted_objects_size(),
495                   heap_->semi_space_copied_object_size(),
496                   heap_->nodes_died_in_new_space_,
497                   heap_->nodes_copied_in_new_space_, heap_->nodes_promoted_,
498                   heap_->promotion_ratio_, AverageSurvivalRatio(),
499                   heap_->promotion_rate_, heap_->semi_space_copied_rate_,
500                   NewSpaceAllocationThroughputInBytesPerMillisecond(),
501                   ContextDisposalRateInMilliseconds());
502      break;
503    case Event::MARK_COMPACTOR:
504    case Event::INCREMENTAL_MARK_COMPACTOR:
505      PrintIsolate(
506          heap_->isolate(),
507          "%8.0f ms: "
508          "pause=%.1f "
509          "mutator=%.1f "
510          "gc=%s "
511          "reduce_memory=%d "
512          "external=%.1f "
513          "clear=%1.f "
514          "clear.code_flush=%.1f "
515          "clear.dependent_code=%.1f "
516          "clear.global_handles=%.1f "
517          "clear.maps=%.1f "
518          "clear.slots_buffer=%.1f "
519          "clear.store_buffer=%.1f "
520          "clear.string_table=%.1f "
521          "clear.weak_cells=%.1f "
522          "clear.weak_collections=%.1f "
523          "clear.weak_lists=%.1f "
524          "evacuate=%.1f "
525          "evacuate.candidates=%.1f "
526          "evacuate.clean_up=%.1f "
527          "evacuate.new_space=%.1f "
528          "evacuate.update_pointers=%.1f "
529          "evacuate.update_pointers.between_evacuated=%.1f "
530          "evacuate.update_pointers.to_evacuated=%.1f "
531          "evacuate.update_pointers.to_new=%.1f "
532          "evacuate.update_pointers.weak=%.1f "
533          "finish=%.1f "
534          "mark=%.1f "
535          "mark.finish_incremental=%.1f "
536          "mark.prepare_code_flush=%.1f "
537          "mark.roots=%.1f "
538          "mark.weak_closure=%.1f "
539          "sweep=%.1f "
540          "sweep.code=%.1f "
541          "sweep.map=%.1f "
542          "sweep.old=%.1f "
543          "incremental_finalize=%.1f "
544          "steps_count=%d "
545          "steps_took=%.1f "
546          "longest_step=%.1f "
547          "finalization_steps_count=%d "
548          "finalization_steps_took=%.1f "
549          "finalization_longest_step=%.1f "
550          "incremental_marking_throughput=%" V8_PTR_PREFIX
551          "d "
552          "total_size_before=%" V8_PTR_PREFIX
553          "d "
554          "total_size_after=%" V8_PTR_PREFIX
555          "d "
556          "holes_size_before=%" V8_PTR_PREFIX
557          "d "
558          "holes_size_after=%" V8_PTR_PREFIX
559          "d "
560          "allocated=%" V8_PTR_PREFIX
561          "d "
562          "promoted=%" V8_PTR_PREFIX
563          "d "
564          "semi_space_copied=%" V8_PTR_PREFIX
565          "d "
566          "nodes_died_in_new=%d "
567          "nodes_copied_in_new=%d "
568          "nodes_promoted=%d "
569          "promotion_ratio=%.1f%% "
570          "average_survival_ratio=%.1f%% "
571          "promotion_rate=%.1f%% "
572          "semi_space_copy_rate=%.1f%% "
573          "new_space_allocation_throughput=%" V8_PTR_PREFIX
574          "d "
575          "context_disposal_rate=%.1f "
576          "compaction_speed=%" V8_PTR_PREFIX "d\n",
577          heap_->isolate()->time_millis_since_init(), duration,
578          spent_in_mutator, current_.TypeName(true), current_.reduce_memory,
579          current_.scopes[Scope::EXTERNAL], current_.scopes[Scope::MC_CLEAR],
580          current_.scopes[Scope::MC_CLEAR_CODE_FLUSH],
581          current_.scopes[Scope::MC_CLEAR_DEPENDENT_CODE],
582          current_.scopes[Scope::MC_CLEAR_GLOBAL_HANDLES],
583          current_.scopes[Scope::MC_CLEAR_MAPS],
584          current_.scopes[Scope::MC_CLEAR_SLOTS_BUFFER],
585          current_.scopes[Scope::MC_CLEAR_STORE_BUFFER],
586          current_.scopes[Scope::MC_CLEAR_STRING_TABLE],
587          current_.scopes[Scope::MC_CLEAR_WEAK_CELLS],
588          current_.scopes[Scope::MC_CLEAR_WEAK_COLLECTIONS],
589          current_.scopes[Scope::MC_CLEAR_WEAK_LISTS],
590          current_.scopes[Scope::MC_EVACUATE],
591          current_.scopes[Scope::MC_EVACUATE_CANDIDATES],
592          current_.scopes[Scope::MC_EVACUATE_CLEAN_UP],
593          current_.scopes[Scope::MC_EVACUATE_NEW_SPACE],
594          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS],
595          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_BETWEEN_EVACUATED],
596          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_EVACUATED],
597          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_TO_NEW],
598          current_.scopes[Scope::MC_EVACUATE_UPDATE_POINTERS_WEAK],
599          current_.scopes[Scope::MC_FINISH], current_.scopes[Scope::MC_MARK],
600          current_.scopes[Scope::MC_MARK_FINISH_INCREMENTAL],
601          current_.scopes[Scope::MC_MARK_PREPARE_CODE_FLUSH],
602          current_.scopes[Scope::MC_MARK_ROOTS],
603          current_.scopes[Scope::MC_MARK_WEAK_CLOSURE],
604          current_.scopes[Scope::MC_SWEEP],
605          current_.scopes[Scope::MC_SWEEP_CODE],
606          current_.scopes[Scope::MC_SWEEP_MAP],
607          current_.scopes[Scope::MC_SWEEP_OLD],
608          current_.scopes[Scope::MC_INCREMENTAL_FINALIZE],
609          current_.incremental_marking_steps,
610          current_.incremental_marking_duration,
611          current_.longest_incremental_marking_step,
612          cumulative_incremental_marking_finalization_steps_,
613          cumulative_incremental_marking_finalization_duration_,
614          longest_incremental_marking_finalization_step_,
615          IncrementalMarkingSpeedInBytesPerMillisecond(),
616          current_.start_object_size, current_.end_object_size,
617          current_.start_holes_size, current_.end_holes_size,
618          allocated_since_last_gc, heap_->promoted_objects_size(),
619          heap_->semi_space_copied_object_size(),
620          heap_->nodes_died_in_new_space_, heap_->nodes_copied_in_new_space_,
621          heap_->nodes_promoted_, heap_->promotion_ratio_,
622          AverageSurvivalRatio(), heap_->promotion_rate_,
623          heap_->semi_space_copied_rate_,
624          NewSpaceAllocationThroughputInBytesPerMillisecond(),
625          ContextDisposalRateInMilliseconds(),
626          CompactionSpeedInBytesPerMillisecond());
627      break;
628    case Event::START:
629      break;
630    default:
631      UNREACHABLE();
632  }
633}
634
635
636double GCTracer::MeanDuration(const EventBuffer& events) const {
637  if (events.empty()) return 0.0;
638
639  double mean = 0.0;
640  EventBuffer::const_iterator iter = events.begin();
641  while (iter != events.end()) {
642    mean += iter->end_time - iter->start_time;
643    ++iter;
644  }
645
646  return mean / events.size();
647}
648
649
650double GCTracer::MaxDuration(const EventBuffer& events) const {
651  if (events.empty()) return 0.0;
652
653  double maximum = 0.0f;
654  EventBuffer::const_iterator iter = events.begin();
655  while (iter != events.end()) {
656    maximum = Max(iter->end_time - iter->start_time, maximum);
657    ++iter;
658  }
659
660  return maximum;
661}
662
663
// Returns the mean duration (ms) of a single incremental marking step,
// averaged over completed incremental mark-compact cycles when available,
// otherwise over the cumulative counters of the current round.
double GCTracer::MeanIncrementalMarkingDuration() const {
  if (cumulative_incremental_marking_steps_ == 0) return 0.0;

  // We haven't completed an entire round of incremental marking, yet.
  // Use data from GCTracer instead of data from event buffers.
  if (incremental_mark_compactor_events_.empty()) {
    return cumulative_incremental_marking_duration_ /
           cumulative_incremental_marking_steps_;
  }

  int steps = 0;
  double durations = 0.0;
  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
  while (iter != incremental_mark_compactor_events_.end()) {
    steps += iter->incremental_marking_steps;
    durations += iter->incremental_marking_duration;
    ++iter;
  }

  if (steps == 0) return 0.0;

  return durations / steps;
}
687
688
689double GCTracer::MaxIncrementalMarkingDuration() const {
690  // We haven't completed an entire round of incremental marking, yet.
691  // Use data from GCTracer instead of data from event buffers.
692  if (incremental_mark_compactor_events_.empty())
693    return longest_incremental_marking_step_;
694
695  double max_duration = 0.0;
696  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
697  while (iter != incremental_mark_compactor_events_.end())
698    max_duration = Max(iter->longest_incremental_marking_step, max_duration);
699
700  return max_duration;
701}
702
703
704intptr_t GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
705  if (cumulative_incremental_marking_duration_ == 0.0) return 0;
706
707  // We haven't completed an entire round of incremental marking, yet.
708  // Use data from GCTracer instead of data from event buffers.
709  if (incremental_mark_compactor_events_.empty()) {
710    return static_cast<intptr_t>(cumulative_incremental_marking_bytes_ /
711                                 cumulative_pure_incremental_marking_duration_);
712  }
713
714  intptr_t bytes = 0;
715  double durations = 0.0;
716  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
717  while (iter != incremental_mark_compactor_events_.end()) {
718    bytes += iter->incremental_marking_bytes;
719    durations += iter->pure_incremental_marking_duration;
720    ++iter;
721  }
722
723  if (durations == 0.0) return 0;
724  // Make sure the result is at least 1.
725  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
726}
727
728
729intptr_t GCTracer::ScavengeSpeedInBytesPerMillisecond(
730    ScavengeSpeedMode mode) const {
731  intptr_t bytes = 0;
732  double durations = 0.0;
733  EventBuffer::const_iterator iter = scavenger_events_.begin();
734  while (iter != scavenger_events_.end()) {
735    bytes += mode == kForAllObjects ? iter->new_space_object_size
736                                    : iter->survived_new_space_object_size;
737    durations += iter->end_time - iter->start_time;
738    ++iter;
739  }
740
741  if (durations == 0.0) return 0;
742  // Make sure the result is at least 1.
743  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
744}
745
746
747intptr_t GCTracer::CompactionSpeedInBytesPerMillisecond() const {
748  if (compaction_events_.size() == 0) return 0;
749  intptr_t bytes = 0;
750  double durations = 0.0;
751  CompactionEventBuffer::const_iterator iter = compaction_events_.begin();
752  while (iter != compaction_events_.end()) {
753    bytes += iter->live_bytes_compacted;
754    durations += iter->duration;
755    ++iter;
756  }
757
758  if (durations == 0.0) return 0;
759  // Make sure the result is at least 1.
760  return Max<intptr_t>(static_cast<intptr_t>(bytes / durations + 0.5), 1);
761}
762
763
764intptr_t GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
765  intptr_t bytes = 0;
766  double durations = 0.0;
767  EventBuffer::const_iterator iter = mark_compactor_events_.begin();
768  while (iter != mark_compactor_events_.end()) {
769    bytes += iter->start_object_size;
770    durations += iter->end_time - iter->start_time;
771    ++iter;
772  }
773
774  if (durations == 0.0) return 0;
775  // Make sure the result is at least 1.
776  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
777}
778
779
780intptr_t GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond()
781    const {
782  intptr_t bytes = 0;
783  double durations = 0.0;
784  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
785  while (iter != incremental_mark_compactor_events_.end()) {
786    bytes += iter->start_object_size;
787    durations += iter->end_time - iter->start_time;
788    ++iter;
789  }
790
791  if (durations == 0.0) return 0;
792  // Make sure the result is at least 1.
793  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
794}
795
796
// Returns the overall mark-compact speed combining the incremental marking
// phase with the final atomic phase; falls back to the non-incremental
// speed when incremental data is (effectively) missing. The result is
// cached; the cache is invalidated in GCTracer::Stop after each
// mark-compact.
double GCTracer::CombinedMarkCompactSpeedInBytesPerMillisecond() {
  if (combined_mark_compact_speed_cache_ > 0)
    return combined_mark_compact_speed_cache_;
  const double kMinimumMarkingSpeed = 0.5;
  double speed1 =
      static_cast<double>(IncrementalMarkingSpeedInBytesPerMillisecond());
  double speed2 = static_cast<double>(
      FinalIncrementalMarkCompactSpeedInBytesPerMillisecond());
  if (speed1 < kMinimumMarkingSpeed || speed2 < kMinimumMarkingSpeed) {
    // No data for the incremental marking speed.
    // Return the non-incremental mark-compact speed.
    combined_mark_compact_speed_cache_ =
        static_cast<double>(MarkCompactSpeedInBytesPerMillisecond());
  } else {
    // Combine the speed of incremental step and the speed of the final step.
    // 1 / (1 / speed1 + 1 / speed2) = speed1 * speed2 / (speed1 + speed2).
    combined_mark_compact_speed_cache_ = speed1 * speed2 / (speed1 + speed2);
  }
  return combined_mark_compact_speed_cache_;
}
817
818
// Returns the new-space allocation throughput in bytes/ms, combining the
// current (not yet archived) interval with recorded allocation events.
// time_ms == 0 means "use all recorded history"; otherwise only roughly the
// most recent time_ms of samples are included.
size_t GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = new_space_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  AllocationEventBuffer::const_iterator iter =
      new_space_allocation_events_.begin();
  const size_t max_bytes = static_cast<size_t>(-1);
  // `bytes < max_bytes - bytes` stops before the running sum could overflow.
  while (iter != new_space_allocation_events_.end() &&
         bytes < max_bytes - bytes && (time_ms == 0 || durations < time_ms)) {
    bytes += iter->allocation_in_bytes_;
    durations += iter->duration_;
    ++iter;
  }

  if (durations == 0.0) return 0;
  // Make sure the result is at least 1.
  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
}
837
838
// Returns the old-generation allocation throughput in bytes/ms; same
// semantics as NewSpaceAllocationThroughputInBytesPerMillisecond, but over
// the old-generation allocation events.
size_t GCTracer::OldGenerationAllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  size_t bytes = old_generation_allocation_in_bytes_since_gc_;
  double durations = allocation_duration_since_gc_;
  AllocationEventBuffer::const_iterator iter =
      old_generation_allocation_events_.begin();
  const size_t max_bytes = static_cast<size_t>(-1);
  // `bytes < max_bytes - bytes` stops before the running sum could overflow.
  while (iter != old_generation_allocation_events_.end() &&
         bytes < max_bytes - bytes && (time_ms == 0 || durations < time_ms)) {
    bytes += iter->allocation_in_bytes_;
    durations += iter->duration_;
    ++iter;
  }

  if (durations == 0.0) return 0;
  // Make sure the result is at least 1.
  return Max<size_t>(static_cast<size_t>(bytes / durations + 0.5), 1);
}
857
858
// Returns the combined (new space + old generation) allocation throughput
// in bytes/ms over roughly the last time_ms (0 = all history).
size_t GCTracer::AllocationThroughputInBytesPerMillisecond(
    double time_ms) const {
  return NewSpaceAllocationThroughputInBytesPerMillisecond(time_ms) +
         OldGenerationAllocationThroughputInBytesPerMillisecond(time_ms);
}
864
865
// Combined allocation throughput over the recent kThroughputTimeFrameMs
// window.
size_t GCTracer::CurrentAllocationThroughputInBytesPerMillisecond() const {
  return AllocationThroughputInBytesPerMillisecond(kThroughputTimeFrameMs);
}
869
870
// Old-generation allocation throughput over the recent
// kThroughputTimeFrameMs window.
size_t GCTracer::CurrentOldGenerationAllocationThroughputInBytesPerMillisecond()
    const {
  return OldGenerationAllocationThroughputInBytesPerMillisecond(
      kThroughputTimeFrameMs);
}
876
877
// Returns the average time (ms) between context disposals, measured from
// now back to the oldest recorded disposal. Returns 0 until the ring buffer
// is full, so the rate is only reported once enough samples exist.
double GCTracer::ContextDisposalRateInMilliseconds() const {
  if (context_disposal_events_.size() < kRingBufferMaxSize) return 0.0;

  double begin = heap_->MonotonicallyIncreasingTimeInMs();
  double end = 0.0;
  ContextDisposalEventBuffer::const_iterator iter =
      context_disposal_events_.begin();
  // Events are push_front'ed, so the last one visited is the oldest; `end`
  // holds its timestamp after the loop.
  while (iter != context_disposal_events_.end()) {
    end = iter->time_;
    ++iter;
  }

  return (begin - end) / context_disposal_events_.size();
}
892
893
894double GCTracer::AverageSurvivalRatio() const {
895  if (survival_events_.size() == 0) return 0.0;
896
897  double sum_of_rates = 0.0;
898  SurvivalEventBuffer::const_iterator iter = survival_events_.begin();
899  while (iter != survival_events_.end()) {
900    sum_of_rates += iter->promotion_ratio_;
901    ++iter;
902  }
903
904  return sum_of_rates / static_cast<double>(survival_events_.size());
905}
906
907
// True when at least one promotion-ratio sample has been recorded.
bool GCTracer::SurvivalEventsRecorded() const {
  return survival_events_.size() > 0;
}
911
912
// Discards all recorded promotion-ratio samples.
void GCTracer::ResetSurvivalEvents() { survival_events_.reset(); }
914}  // namespace internal
915}  // namespace v8
916