1// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/profiler/heap-snapshot-generator.h"
6
7#include "src/code-stubs.h"
8#include "src/conversions.h"
9#include "src/debug/debug.h"
10#include "src/objects-body-descriptors.h"
11#include "src/profiler/allocation-tracker.h"
12#include "src/profiler/heap-profiler.h"
13#include "src/profiler/heap-snapshot-generator-inl.h"
14
15namespace v8 {
16namespace internal {
17
18
19HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
20    : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
21      to_index_(to),
22      name_(name) {
23  DCHECK(type == kContextVariable
24      || type == kProperty
25      || type == kInternal
26      || type == kShortcut
27      || type == kWeak);
28}
29
30
31HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
32    : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
33      to_index_(to),
34      index_(index) {
35  DCHECK(type == kElement || type == kHidden);
36}
37
38
39void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
40  to_entry_ = &snapshot->entries()[to_index_];
41}
42
43
44const int HeapEntry::kNoEntry = -1;
45
46HeapEntry::HeapEntry(HeapSnapshot* snapshot,
47                     Type type,
48                     const char* name,
49                     SnapshotObjectId id,
50                     size_t self_size,
51                     unsigned trace_node_id)
52    : type_(type),
53      children_count_(0),
54      children_index_(-1),
55      self_size_(self_size),
56      snapshot_(snapshot),
57      name_(name),
58      id_(id),
59      trace_node_id_(trace_node_id) { }
60
61
62void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
63                                  const char* name,
64                                  HeapEntry* entry) {
65  HeapGraphEdge edge(type, name, this->index(), entry->index());
66  snapshot_->edges().Add(edge);
67  ++children_count_;
68}
69
70
71void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
72                                    int index,
73                                    HeapEntry* entry) {
74  HeapGraphEdge edge(type, index, this->index(), entry->index());
75  snapshot_->edges().Add(edge);
76  ++children_count_;
77}
78
79
80void HeapEntry::Print(
81    const char* prefix, const char* edge_name, int max_depth, int indent) {
82  STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
83  base::OS::Print("%6" PRIuS " @%6u %*c %s%s: ", self_size(), id(), indent, ' ',
84                  prefix, edge_name);
85  if (type() != kString) {
86    base::OS::Print("%s %.40s\n", TypeAsString(), name_);
87  } else {
88    base::OS::Print("\"");
89    const char* c = name_;
90    while (*c && (c - name_) <= 40) {
91      if (*c != '\n')
92        base::OS::Print("%c", *c);
93      else
94        base::OS::Print("\\n");
95      ++c;
96    }
97    base::OS::Print("\"\n");
98  }
99  if (--max_depth == 0) return;
100  Vector<HeapGraphEdge*> ch = children();
101  for (int i = 0; i < ch.length(); ++i) {
102    HeapGraphEdge& edge = *ch[i];
103    const char* edge_prefix = "";
104    EmbeddedVector<char, 64> index;
105    const char* edge_name = index.start();
106    switch (edge.type()) {
107      case HeapGraphEdge::kContextVariable:
108        edge_prefix = "#";
109        edge_name = edge.name();
110        break;
111      case HeapGraphEdge::kElement:
112        SNPrintF(index, "%d", edge.index());
113        break;
114      case HeapGraphEdge::kInternal:
115        edge_prefix = "$";
116        edge_name = edge.name();
117        break;
118      case HeapGraphEdge::kProperty:
119        edge_name = edge.name();
120        break;
121      case HeapGraphEdge::kHidden:
122        edge_prefix = "$";
123        SNPrintF(index, "%d", edge.index());
124        break;
125      case HeapGraphEdge::kShortcut:
126        edge_prefix = "^";
127        edge_name = edge.name();
128        break;
129      case HeapGraphEdge::kWeak:
130        edge_prefix = "w";
131        edge_name = edge.name();
132        break;
133      default:
134        SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
135    }
136    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
137  }
138}
139
140
141const char* HeapEntry::TypeAsString() {
142  switch (type()) {
143    case kHidden: return "/hidden/";
144    case kObject: return "/object/";
145    case kClosure: return "/closure/";
146    case kString: return "/string/";
147    case kCode: return "/code/";
148    case kArray: return "/array/";
149    case kRegExp: return "/regexp/";
150    case kHeapNumber: return "/number/";
151    case kNative: return "/native/";
152    case kSynthetic: return "/synthetic/";
153    case kConsString: return "/concatenated string/";
154    case kSlicedString: return "/sliced string/";
155    case kSymbol: return "/symbol/";
156    case kSimdValue: return "/simd/";
157    default: return "???";
158  }
159}
160
161
162// It is very important to keep objects that form a heap snapshot
163// as small as possible.
164namespace {  // Avoid littering the global namespace.
165
166template <size_t ptr_size> struct SnapshotSizeConstants;
167
168template <> struct SnapshotSizeConstants<4> {
169  static const int kExpectedHeapGraphEdgeSize = 12;
170  static const int kExpectedHeapEntrySize = 28;
171};
172
173template <> struct SnapshotSizeConstants<8> {
174  static const int kExpectedHeapGraphEdgeSize = 24;
175  static const int kExpectedHeapEntrySize = 40;
176};
177
178}  // namespace
179
180
181HeapSnapshot::HeapSnapshot(HeapProfiler* profiler)
182    : profiler_(profiler),
183      root_index_(HeapEntry::kNoEntry),
184      gc_roots_index_(HeapEntry::kNoEntry),
185      max_snapshot_js_object_id_(0) {
186  STATIC_ASSERT(
187      sizeof(HeapGraphEdge) ==
188      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
189  STATIC_ASSERT(
190      sizeof(HeapEntry) ==
191      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
192  USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
193  USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
194  USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
195  USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
196  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
197    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
198  }
199}
200
201
202void HeapSnapshot::Delete() {
203  profiler_->RemoveSnapshot(this);
204  delete this;
205}
206
207
208void HeapSnapshot::RememberLastJSObjectId() {
209  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
210}
211
212
213void HeapSnapshot::AddSyntheticRootEntries() {
214  AddRootEntry();
215  AddGcRootsEntry();
216  SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
217  for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
218    AddGcSubrootEntry(tag, id);
219    id += HeapObjectsMap::kObjectIdStep;
220  }
221  DCHECK(HeapObjectsMap::kFirstAvailableObjectId == id);
222}
223
224
225HeapEntry* HeapSnapshot::AddRootEntry() {
226  DCHECK(root_index_ == HeapEntry::kNoEntry);
227  DCHECK(entries_.is_empty());  // Root entry must be the first one.
228  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
229                              "",
230                              HeapObjectsMap::kInternalRootObjectId,
231                              0,
232                              0);
233  root_index_ = entry->index();
234  DCHECK(root_index_ == 0);
235  return entry;
236}
237
238
239HeapEntry* HeapSnapshot::AddGcRootsEntry() {
240  DCHECK(gc_roots_index_ == HeapEntry::kNoEntry);
241  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
242                              "(GC roots)",
243                              HeapObjectsMap::kGcRootsObjectId,
244                              0,
245                              0);
246  gc_roots_index_ = entry->index();
247  return entry;
248}
249
250
251HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag, SnapshotObjectId id) {
252  DCHECK(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
253  DCHECK(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
254  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
255                              VisitorSynchronization::kTagNames[tag], id, 0, 0);
256  gc_subroot_indexes_[tag] = entry->index();
257  return entry;
258}
259
260
261HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
262                                  const char* name,
263                                  SnapshotObjectId id,
264                                  size_t size,
265                                  unsigned trace_node_id) {
266  HeapEntry entry(this, type, name, id, size, trace_node_id);
267  entries_.Add(entry);
268  return &entries_.last();
269}
270
271
272void HeapSnapshot::FillChildren() {
273  DCHECK(children().is_empty());
274  children().Allocate(edges().length());
275  int children_index = 0;
276  for (int i = 0; i < entries().length(); ++i) {
277    HeapEntry* entry = &entries()[i];
278    children_index = entry->set_children_index(children_index);
279  }
280  DCHECK(edges().length() == children_index);
281  for (int i = 0; i < edges().length(); ++i) {
282    HeapGraphEdge* edge = &edges()[i];
283    edge->ReplaceToIndexWithEntry(this);
284    edge->from()->add_child(edge);
285  }
286}
287
288
289class FindEntryById {
290 public:
291  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
292  int operator()(HeapEntry* const* entry) {
293    if ((*entry)->id() == id_) return 0;
294    return (*entry)->id() < id_ ? -1 : 1;
295  }
296 private:
297  SnapshotObjectId id_;
298};
299
300
301HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
302  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
303  // Perform a binary search by id.
304  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
305  if (index == -1)
306    return NULL;
307  return entries_by_id->at(index);
308}
309
310
311template<class T>
312static int SortByIds(const T* entry1_ptr,
313                     const T* entry2_ptr) {
314  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
315  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
316}
317
318
319List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
320  if (sorted_entries_.is_empty()) {
321    sorted_entries_.Allocate(entries_.length());
322    for (int i = 0; i < entries_.length(); ++i) {
323      sorted_entries_[i] = &entries_[i];
324    }
325    sorted_entries_.Sort<int (*)(HeapEntry* const*, HeapEntry* const*)>(
326        SortByIds);
327  }
328  return &sorted_entries_;
329}
330
331
332void HeapSnapshot::Print(int max_depth) {
333  root()->Print("", "", max_depth, 0);
334}
335
336
337size_t HeapSnapshot::RawSnapshotSize() const {
338  return
339      sizeof(*this) +
340      GetMemoryUsedByList(entries_) +
341      GetMemoryUsedByList(edges_) +
342      GetMemoryUsedByList(children_) +
343      GetMemoryUsedByList(sorted_entries_);
344}
345
346
347// We split IDs on evens for embedder objects (see
348// HeapObjectsMap::GenerateId) and odds for native objects.
349const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
350const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
351    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
352const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
353    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
354const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
355    HeapObjectsMap::kGcRootsFirstSubrootId +
356    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
357
358HeapObjectsMap::HeapObjectsMap(Heap* heap)
359    : next_id_(kFirstAvailableObjectId), heap_(heap) {
360  // This dummy element solves a problem with entries_map_.
361  // When we do lookup in HashMap we see no difference between two cases:
362  // it has an entry with NULL as the value or it has created
363  // a new entry on the fly with NULL as the default value.
364  // With such dummy element we have a guaranty that all entries_map_ entries
365  // will have the value field grater than 0.
366  // This fact is using in MoveObject method.
367  entries_.Add(EntryInfo(0, NULL, 0));
368}
369
370
371bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
372  DCHECK(to != NULL);
373  DCHECK(from != NULL);
374  if (from == to) return false;
375  void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
376  if (from_value == NULL) {
377    // It may occur that some untracked object moves to an address X and there
378    // is a tracked object at that address. In this case we should remove the
379    // entry as we know that the object has died.
380    void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
381    if (to_value != NULL) {
382      int to_entry_info_index =
383          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
384      entries_.at(to_entry_info_index).addr = NULL;
385    }
386  } else {
387    base::HashMap::Entry* to_entry =
388        entries_map_.LookupOrInsert(to, ComputePointerHash(to));
389    if (to_entry->value != NULL) {
390      // We found the existing entry with to address for an old object.
391      // Without this operation we will have two EntryInfo's with the same
392      // value in addr field. It is bad because later at RemoveDeadEntries
393      // one of this entry will be removed with the corresponding entries_map_
394      // entry.
395      int to_entry_info_index =
396          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
397      entries_.at(to_entry_info_index).addr = NULL;
398    }
399    int from_entry_info_index =
400        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
401    entries_.at(from_entry_info_index).addr = to;
402    // Size of an object can change during its life, so to keep information
403    // about the object in entries_ consistent, we have to adjust size when the
404    // object is migrated.
405    if (FLAG_heap_profiler_trace_objects) {
406      PrintF("Move object from %p to %p old size %6d new size %6d\n",
407             static_cast<void*>(from), static_cast<void*>(to),
408             entries_.at(from_entry_info_index).size, object_size);
409    }
410    entries_.at(from_entry_info_index).size = object_size;
411    to_entry->value = from_value;
412  }
413  return from_value != NULL;
414}
415
416
417void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
418  FindOrAddEntry(addr, size, false);
419}
420
421
422SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
423  base::HashMap::Entry* entry =
424      entries_map_.Lookup(addr, ComputePointerHash(addr));
425  if (entry == NULL) return 0;
426  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
427  EntryInfo& entry_info = entries_.at(entry_index);
428  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
429  return entry_info.id;
430}
431
432
433SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
434                                                unsigned int size,
435                                                bool accessed) {
436  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
437  base::HashMap::Entry* entry =
438      entries_map_.LookupOrInsert(addr, ComputePointerHash(addr));
439  if (entry->value != NULL) {
440    int entry_index =
441        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
442    EntryInfo& entry_info = entries_.at(entry_index);
443    entry_info.accessed = accessed;
444    if (FLAG_heap_profiler_trace_objects) {
445      PrintF("Update object size : %p with old size %d and new size %d\n",
446             static_cast<void*>(addr), entry_info.size, size);
447    }
448    entry_info.size = size;
449    return entry_info.id;
450  }
451  entry->value = reinterpret_cast<void*>(entries_.length());
452  SnapshotObjectId id = next_id_;
453  next_id_ += kObjectIdStep;
454  entries_.Add(EntryInfo(id, addr, size, accessed));
455  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
456  return id;
457}
458
459
460void HeapObjectsMap::StopHeapObjectsTracking() {
461  time_intervals_.Clear();
462}
463
464
465void HeapObjectsMap::UpdateHeapObjectsMap() {
466  if (FLAG_heap_profiler_trace_objects) {
467    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
468           entries_map_.occupancy());
469  }
470  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
471                           GarbageCollectionReason::kHeapProfiler);
472  HeapIterator iterator(heap_);
473  for (HeapObject* obj = iterator.next();
474       obj != NULL;
475       obj = iterator.next()) {
476    FindOrAddEntry(obj->address(), obj->Size());
477    if (FLAG_heap_profiler_trace_objects) {
478      PrintF("Update object      : %p %6d. Next address is %p\n",
479             static_cast<void*>(obj->address()), obj->Size(),
480             static_cast<void*>(obj->address() + obj->Size()));
481    }
482  }
483  RemoveDeadEntries();
484  if (FLAG_heap_profiler_trace_objects) {
485    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
486           entries_map_.occupancy());
487  }
488}
489
490
491namespace {
492
493
494struct HeapObjectInfo {
495  HeapObjectInfo(HeapObject* obj, int expected_size)
496    : obj(obj),
497      expected_size(expected_size) {
498  }
499
500  HeapObject* obj;
501  int expected_size;
502
503  bool IsValid() const { return expected_size == obj->Size(); }
504
505  void Print() const {
506    if (expected_size == 0) {
507      PrintF("Untracked object   : %p %6d. Next address is %p\n",
508             static_cast<void*>(obj->address()), obj->Size(),
509             static_cast<void*>(obj->address() + obj->Size()));
510    } else if (obj->Size() != expected_size) {
511      PrintF("Wrong size %6d: %p %6d. Next address is %p\n", expected_size,
512             static_cast<void*>(obj->address()), obj->Size(),
513             static_cast<void*>(obj->address() + obj->Size()));
514    } else {
515      PrintF("Good object      : %p %6d. Next address is %p\n",
516             static_cast<void*>(obj->address()), expected_size,
517             static_cast<void*>(obj->address() + obj->Size()));
518    }
519  }
520};
521
522
523static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
524  if (a->obj < b->obj) return -1;
525  if (a->obj > b->obj) return 1;
526  return 0;
527}
528
529
530}  // namespace
531
532
533int HeapObjectsMap::FindUntrackedObjects() {
534  List<HeapObjectInfo> heap_objects(1000);
535
536  HeapIterator iterator(heap_);
537  int untracked = 0;
538  for (HeapObject* obj = iterator.next();
539       obj != NULL;
540       obj = iterator.next()) {
541    base::HashMap::Entry* entry =
542        entries_map_.Lookup(obj->address(), ComputePointerHash(obj->address()));
543    if (entry == NULL) {
544      ++untracked;
545      if (FLAG_heap_profiler_trace_objects) {
546        heap_objects.Add(HeapObjectInfo(obj, 0));
547      }
548    } else {
549      int entry_index = static_cast<int>(
550          reinterpret_cast<intptr_t>(entry->value));
551      EntryInfo& entry_info = entries_.at(entry_index);
552      if (FLAG_heap_profiler_trace_objects) {
553        heap_objects.Add(HeapObjectInfo(obj,
554                         static_cast<int>(entry_info.size)));
555        if (obj->Size() != static_cast<int>(entry_info.size))
556          ++untracked;
557      } else {
558        CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
559      }
560    }
561  }
562  if (FLAG_heap_profiler_trace_objects) {
563    PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
564           entries_map_.occupancy());
565    heap_objects.Sort(comparator);
566    int last_printed_object = -1;
567    bool print_next_object = false;
568    for (int i = 0; i < heap_objects.length(); ++i) {
569      const HeapObjectInfo& object_info = heap_objects[i];
570      if (!object_info.IsValid()) {
571        ++untracked;
572        if (last_printed_object != i - 1) {
573          if (i > 0) {
574            PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
575            heap_objects[i - 1].Print();
576          }
577        }
578        object_info.Print();
579        last_printed_object = i;
580        print_next_object = true;
581      } else if (print_next_object) {
582        object_info.Print();
583        print_next_object = false;
584        last_printed_object = i;
585      }
586    }
587    if (last_printed_object < heap_objects.length() - 1) {
588      PrintF("Last %d objects were skipped\n",
589             heap_objects.length() - 1 - last_printed_object);
590    }
591    PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
592           entries_map_.occupancy());
593  }
594  return untracked;
595}
596
597
598SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream,
599                                                      int64_t* timestamp_us) {
600  UpdateHeapObjectsMap();
601  time_intervals_.Add(TimeInterval(next_id_));
602  int prefered_chunk_size = stream->GetChunkSize();
603  List<v8::HeapStatsUpdate> stats_buffer;
604  DCHECK(!entries_.is_empty());
605  EntryInfo* entry_info = &entries_.first();
606  EntryInfo* end_entry_info = &entries_.last() + 1;
607  for (int time_interval_index = 0;
608       time_interval_index < time_intervals_.length();
609       ++time_interval_index) {
610    TimeInterval& time_interval = time_intervals_[time_interval_index];
611    SnapshotObjectId time_interval_id = time_interval.id;
612    uint32_t entries_size = 0;
613    EntryInfo* start_entry_info = entry_info;
614    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
615      entries_size += entry_info->size;
616      ++entry_info;
617    }
618    uint32_t entries_count =
619        static_cast<uint32_t>(entry_info - start_entry_info);
620    if (time_interval.count != entries_count ||
621        time_interval.size != entries_size) {
622      stats_buffer.Add(v8::HeapStatsUpdate(
623          time_interval_index,
624          time_interval.count = entries_count,
625          time_interval.size = entries_size));
626      if (stats_buffer.length() >= prefered_chunk_size) {
627        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
628            &stats_buffer.first(), stats_buffer.length());
629        if (result == OutputStream::kAbort) return last_assigned_id();
630        stats_buffer.Clear();
631      }
632    }
633  }
634  DCHECK(entry_info == end_entry_info);
635  if (!stats_buffer.is_empty()) {
636    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
637        &stats_buffer.first(), stats_buffer.length());
638    if (result == OutputStream::kAbort) return last_assigned_id();
639  }
640  stream->EndOfStream();
641  if (timestamp_us) {
642    *timestamp_us = (time_intervals_.last().timestamp -
643                     time_intervals_[0].timestamp).InMicroseconds();
644  }
645  return last_assigned_id();
646}
647
648
649void HeapObjectsMap::RemoveDeadEntries() {
650  DCHECK(entries_.length() > 0 &&
651         entries_.at(0).id == 0 &&
652         entries_.at(0).addr == NULL);
653  int first_free_entry = 1;
654  for (int i = 1; i < entries_.length(); ++i) {
655    EntryInfo& entry_info = entries_.at(i);
656    if (entry_info.accessed) {
657      if (first_free_entry != i) {
658        entries_.at(first_free_entry) = entry_info;
659      }
660      entries_.at(first_free_entry).accessed = false;
661      base::HashMap::Entry* entry = entries_map_.Lookup(
662          entry_info.addr, ComputePointerHash(entry_info.addr));
663      DCHECK(entry);
664      entry->value = reinterpret_cast<void*>(first_free_entry);
665      ++first_free_entry;
666    } else {
667      if (entry_info.addr) {
668        entries_map_.Remove(entry_info.addr,
669                            ComputePointerHash(entry_info.addr));
670      }
671    }
672  }
673  entries_.Rewind(first_free_entry);
674  DCHECK(static_cast<uint32_t>(entries_.length()) - 1 ==
675         entries_map_.occupancy());
676}
677
678
679SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
680  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
681  const char* label = info->GetLabel();
682  id ^= StringHasher::HashSequentialString(label,
683                                           static_cast<int>(strlen(label)),
684                                           heap_->HashSeed());
685  intptr_t element_count = info->GetElementCount();
686  if (element_count != -1)
687    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
688                             v8::internal::kZeroHashSeed);
689  return id << 1;
690}
691
692
693size_t HeapObjectsMap::GetUsedMemorySize() const {
694  return sizeof(*this) +
695         sizeof(base::HashMap::Entry) * entries_map_.capacity() +
696         GetMemoryUsedByList(entries_) + GetMemoryUsedByList(time_intervals_);
697}
698
699HeapEntriesMap::HeapEntriesMap() : entries_() {}
700
701int HeapEntriesMap::Map(HeapThing thing) {
702  base::HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing));
703  if (cache_entry == NULL) return HeapEntry::kNoEntry;
704  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
705}
706
707
708void HeapEntriesMap::Pair(HeapThing thing, int entry) {
709  base::HashMap::Entry* cache_entry =
710      entries_.LookupOrInsert(thing, Hash(thing));
711  DCHECK(cache_entry->value == NULL);
712  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
713}
714
715HeapObjectsSet::HeapObjectsSet() : entries_() {}
716
717void HeapObjectsSet::Clear() {
718  entries_.Clear();
719}
720
721
722bool HeapObjectsSet::Contains(Object* obj) {
723  if (!obj->IsHeapObject()) return false;
724  HeapObject* object = HeapObject::cast(obj);
725  return entries_.Lookup(object, HeapEntriesMap::Hash(object)) != NULL;
726}
727
728
729void HeapObjectsSet::Insert(Object* obj) {
730  if (!obj->IsHeapObject()) return;
731  HeapObject* object = HeapObject::cast(obj);
732  entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
733}
734
735
736const char* HeapObjectsSet::GetTag(Object* obj) {
737  HeapObject* object = HeapObject::cast(obj);
738  base::HashMap::Entry* cache_entry =
739      entries_.Lookup(object, HeapEntriesMap::Hash(object));
740  return cache_entry != NULL
741      ? reinterpret_cast<const char*>(cache_entry->value)
742      : NULL;
743}
744
745
746V8_NOINLINE void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
747  if (!obj->IsHeapObject()) return;
748  HeapObject* object = HeapObject::cast(obj);
749  base::HashMap::Entry* cache_entry =
750      entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
751  cache_entry->value = const_cast<char*>(tag);
752}
753
754
755V8HeapExplorer::V8HeapExplorer(
756    HeapSnapshot* snapshot,
757    SnapshottingProgressReportingInterface* progress,
758    v8::HeapProfiler::ObjectNameResolver* resolver)
759    : heap_(snapshot->profiler()->heap_object_map()->heap()),
760      snapshot_(snapshot),
761      names_(snapshot_->profiler()->names()),
762      heap_object_map_(snapshot_->profiler()->heap_object_map()),
763      progress_(progress),
764      filler_(NULL),
765      global_object_name_resolver_(resolver) {
766}
767
768
769V8HeapExplorer::~V8HeapExplorer() {
770}
771
772
773HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
774  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
775}
776
777
778HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
779  if (object->IsJSFunction()) {
780    JSFunction* func = JSFunction::cast(object);
781    SharedFunctionInfo* shared = func->shared();
782    const char* name = names_->GetName(String::cast(shared->name()));
783    return AddEntry(object, HeapEntry::kClosure, name);
784  } else if (object->IsJSBoundFunction()) {
785    return AddEntry(object, HeapEntry::kClosure, "native_bind");
786  } else if (object->IsJSRegExp()) {
787    JSRegExp* re = JSRegExp::cast(object);
788    return AddEntry(object,
789                    HeapEntry::kRegExp,
790                    names_->GetName(re->Pattern()));
791  } else if (object->IsJSObject()) {
792    const char* name = names_->GetName(
793        GetConstructorName(JSObject::cast(object)));
794    if (object->IsJSGlobalObject()) {
795      const char* tag = objects_tags_.GetTag(object);
796      if (tag != NULL) {
797        name = names_->GetFormatted("%s / %s", name, tag);
798      }
799    }
800    return AddEntry(object, HeapEntry::kObject, name);
801  } else if (object->IsString()) {
802    String* string = String::cast(object);
803    if (string->IsConsString())
804      return AddEntry(object,
805                      HeapEntry::kConsString,
806                      "(concatenated string)");
807    if (string->IsSlicedString())
808      return AddEntry(object,
809                      HeapEntry::kSlicedString,
810                      "(sliced string)");
811    return AddEntry(object,
812                    HeapEntry::kString,
813                    names_->GetName(String::cast(object)));
814  } else if (object->IsSymbol()) {
815    if (Symbol::cast(object)->is_private())
816      return AddEntry(object, HeapEntry::kHidden, "private symbol");
817    else
818      return AddEntry(object, HeapEntry::kSymbol, "symbol");
819  } else if (object->IsCode()) {
820    return AddEntry(object, HeapEntry::kCode, "");
821  } else if (object->IsSharedFunctionInfo()) {
822    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
823    return AddEntry(object,
824                    HeapEntry::kCode,
825                    names_->GetName(name));
826  } else if (object->IsScript()) {
827    Object* name = Script::cast(object)->name();
828    return AddEntry(object,
829                    HeapEntry::kCode,
830                    name->IsString()
831                        ? names_->GetName(String::cast(name))
832                        : "");
833  } else if (object->IsNativeContext()) {
834    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
835  } else if (object->IsContext()) {
836    return AddEntry(object, HeapEntry::kObject, "system / Context");
837  } else if (object->IsFixedArray() || object->IsFixedDoubleArray() ||
838             object->IsByteArray()) {
839    return AddEntry(object, HeapEntry::kArray, "");
840  } else if (object->IsHeapNumber()) {
841    return AddEntry(object, HeapEntry::kHeapNumber, "number");
842  } else if (object->IsSimd128Value()) {
843    return AddEntry(object, HeapEntry::kSimdValue, "simd");
844  }
845  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
846}
847
848
849HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
850                                    HeapEntry::Type type,
851                                    const char* name) {
852  return AddEntry(object->address(), type, name, object->Size());
853}
854
855
856HeapEntry* V8HeapExplorer::AddEntry(Address address,
857                                    HeapEntry::Type type,
858                                    const char* name,
859                                    size_t size) {
860  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
861      address, static_cast<unsigned int>(size));
862  unsigned trace_node_id = 0;
863  if (AllocationTracker* allocation_tracker =
864      snapshot_->profiler()->allocation_tracker()) {
865    trace_node_id =
866        allocation_tracker->address_to_trace()->GetTraceNodeId(address);
867  }
868  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
869}
870
871
872class SnapshotFiller {
873 public:
874  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
875      : snapshot_(snapshot),
876        names_(snapshot->profiler()->names()),
877        entries_(entries) { }
878  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
879    HeapEntry* entry = allocator->AllocateEntry(ptr);
880    entries_->Pair(ptr, entry->index());
881    return entry;
882  }
883  HeapEntry* FindEntry(HeapThing ptr) {
884    int index = entries_->Map(ptr);
885    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
886  }
887  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
888    HeapEntry* entry = FindEntry(ptr);
889    return entry != NULL ? entry : AddEntry(ptr, allocator);
890  }
891  void SetIndexedReference(HeapGraphEdge::Type type,
892                           int parent,
893                           int index,
894                           HeapEntry* child_entry) {
895    HeapEntry* parent_entry = &snapshot_->entries()[parent];
896    parent_entry->SetIndexedReference(type, index, child_entry);
897  }
898  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
899                                    int parent,
900                                    HeapEntry* child_entry) {
901    HeapEntry* parent_entry = &snapshot_->entries()[parent];
902    int index = parent_entry->children_count() + 1;
903    parent_entry->SetIndexedReference(type, index, child_entry);
904  }
905  void SetNamedReference(HeapGraphEdge::Type type,
906                         int parent,
907                         const char* reference_name,
908                         HeapEntry* child_entry) {
909    HeapEntry* parent_entry = &snapshot_->entries()[parent];
910    parent_entry->SetNamedReference(type, reference_name, child_entry);
911  }
912  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
913                                  int parent,
914                                  HeapEntry* child_entry) {
915    HeapEntry* parent_entry = &snapshot_->entries()[parent];
916    int index = parent_entry->children_count() + 1;
917    parent_entry->SetNamedReference(
918        type,
919        names_->GetName(index),
920        child_entry);
921  }
922
923 private:
924  HeapSnapshot* snapshot_;
925  StringsStorage* names_;
926  HeapEntriesMap* entries_;
927};
928
929
930const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
931  switch (object->map()->instance_type()) {
932    case MAP_TYPE:
933      switch (Map::cast(object)->instance_type()) {
934#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
935        case instance_type: return "system / Map (" #Name ")";
936      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
937#undef MAKE_STRING_MAP_CASE
938        default: return "system / Map";
939      }
940    case CELL_TYPE: return "system / Cell";
941    case PROPERTY_CELL_TYPE: return "system / PropertyCell";
942    case FOREIGN_TYPE: return "system / Foreign";
943    case ODDBALL_TYPE: return "system / Oddball";
944#define MAKE_STRUCT_CASE(NAME, Name, name) \
945    case NAME##_TYPE: return "system / "#Name;
946  STRUCT_LIST(MAKE_STRUCT_CASE)
947#undef MAKE_STRUCT_CASE
948    default: return "system";
949  }
950}
951
952
953int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
954  int objects_count = 0;
955  for (HeapObject* obj = iterator->next();
956       obj != NULL;
957       obj = iterator->next()) {
958    objects_count++;
959  }
960  return objects_count;
961}
962
963
964class IndexedReferencesExtractor : public ObjectVisitor {
965 public:
966  IndexedReferencesExtractor(V8HeapExplorer* generator, HeapObject* parent_obj,
967                             int parent)
968      : generator_(generator),
969        parent_obj_(parent_obj),
970        parent_start_(HeapObject::RawField(parent_obj_, 0)),
971        parent_end_(HeapObject::RawField(parent_obj_, parent_obj_->Size())),
972        parent_(parent),
973        next_index_(0) {}
974  void VisitCodeEntry(Address entry_address) override {
975     Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
976     generator_->SetInternalReference(parent_obj_, parent_, "code", code);
977     generator_->TagCodeObject(code);
978  }
979  void VisitPointers(Object** start, Object** end) override {
980    for (Object** p = start; p < end; p++) {
981      int index = static_cast<int>(p - HeapObject::RawField(parent_obj_, 0));
982      ++next_index_;
983      // |p| could be outside of the object, e.g., while visiting RelocInfo of
984      // code objects.
985      if (p >= parent_start_ && p < parent_end_ && generator_->marks_[index]) {
986        generator_->marks_[index] = false;
987        continue;
988      }
989      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p,
990                                     index * kPointerSize);
991    }
992  }
993
994 private:
995  V8HeapExplorer* generator_;
996  HeapObject* parent_obj_;
997  Object** parent_start_;
998  Object** parent_end_;
999  int parent_;
1000  int next_index_;
1001};
1002
1003
1004bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
1005  if (obj->IsFixedArray()) return false;  // FixedArrays are processed on pass 2
1006
1007  if (obj->IsJSGlobalProxy()) {
1008    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
1009  } else if (obj->IsJSArrayBuffer()) {
1010    ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
1011  } else if (obj->IsJSObject()) {
1012    if (obj->IsJSWeakSet()) {
1013      ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
1014    } else if (obj->IsJSWeakMap()) {
1015      ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
1016    } else if (obj->IsJSSet()) {
1017      ExtractJSCollectionReferences(entry, JSSet::cast(obj));
1018    } else if (obj->IsJSMap()) {
1019      ExtractJSCollectionReferences(entry, JSMap::cast(obj));
1020    }
1021    ExtractJSObjectReferences(entry, JSObject::cast(obj));
1022  } else if (obj->IsString()) {
1023    ExtractStringReferences(entry, String::cast(obj));
1024  } else if (obj->IsSymbol()) {
1025    ExtractSymbolReferences(entry, Symbol::cast(obj));
1026  } else if (obj->IsMap()) {
1027    ExtractMapReferences(entry, Map::cast(obj));
1028  } else if (obj->IsSharedFunctionInfo()) {
1029    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
1030  } else if (obj->IsScript()) {
1031    ExtractScriptReferences(entry, Script::cast(obj));
1032  } else if (obj->IsAccessorInfo()) {
1033    ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
1034  } else if (obj->IsAccessorPair()) {
1035    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
1036  } else if (obj->IsCode()) {
1037    ExtractCodeReferences(entry, Code::cast(obj));
1038  } else if (obj->IsBox()) {
1039    ExtractBoxReferences(entry, Box::cast(obj));
1040  } else if (obj->IsCell()) {
1041    ExtractCellReferences(entry, Cell::cast(obj));
1042  } else if (obj->IsWeakCell()) {
1043    ExtractWeakCellReferences(entry, WeakCell::cast(obj));
1044  } else if (obj->IsPropertyCell()) {
1045    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
1046  } else if (obj->IsAllocationSite()) {
1047    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
1048  }
1049  return true;
1050}
1051
1052
1053bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) {
1054  if (!obj->IsFixedArray()) return false;
1055
1056  if (obj->IsContext()) {
1057    ExtractContextReferences(entry, Context::cast(obj));
1058  } else {
1059    ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
1060  }
1061  return true;
1062}
1063
1064
1065void V8HeapExplorer::ExtractJSGlobalProxyReferences(
1066    int entry, JSGlobalProxy* proxy) {
1067  SetInternalReference(proxy, entry,
1068                       "native_context", proxy->native_context(),
1069                       JSGlobalProxy::kNativeContextOffset);
1070}
1071
1072
1073void V8HeapExplorer::ExtractJSObjectReferences(
1074    int entry, JSObject* js_obj) {
1075  HeapObject* obj = js_obj;
1076  ExtractPropertyReferences(js_obj, entry);
1077  ExtractElementReferences(js_obj, entry);
1078  ExtractInternalReferences(js_obj, entry);
1079  PrototypeIterator iter(heap_->isolate(), js_obj);
1080  SetPropertyReference(obj, entry, heap_->proto_string(), iter.GetCurrent());
1081  if (obj->IsJSBoundFunction()) {
1082    JSBoundFunction* js_fun = JSBoundFunction::cast(obj);
1083    TagObject(js_fun->bound_arguments(), "(bound arguments)");
1084    SetInternalReference(js_fun, entry, "bindings", js_fun->bound_arguments(),
1085                         JSBoundFunction::kBoundArgumentsOffset);
1086    SetInternalReference(js_obj, entry, "bound_this", js_fun->bound_this(),
1087                         JSBoundFunction::kBoundThisOffset);
1088    SetInternalReference(js_obj, entry, "bound_function",
1089                         js_fun->bound_target_function(),
1090                         JSBoundFunction::kBoundTargetFunctionOffset);
1091    FixedArray* bindings = js_fun->bound_arguments();
1092    for (int i = 0; i < bindings->length(); i++) {
1093      const char* reference_name = names_->GetFormatted("bound_argument_%d", i);
1094      SetNativeBindReference(js_obj, entry, reference_name, bindings->get(i));
1095    }
1096  } else if (obj->IsJSFunction()) {
1097    JSFunction* js_fun = JSFunction::cast(js_obj);
1098    Object* proto_or_map = js_fun->prototype_or_initial_map();
1099    if (!proto_or_map->IsTheHole(heap_->isolate())) {
1100      if (!proto_or_map->IsMap()) {
1101        SetPropertyReference(
1102            obj, entry,
1103            heap_->prototype_string(), proto_or_map,
1104            NULL,
1105            JSFunction::kPrototypeOrInitialMapOffset);
1106      } else {
1107        SetPropertyReference(
1108            obj, entry,
1109            heap_->prototype_string(), js_fun->prototype());
1110        SetInternalReference(
1111            obj, entry, "initial_map", proto_or_map,
1112            JSFunction::kPrototypeOrInitialMapOffset);
1113      }
1114    }
1115    SharedFunctionInfo* shared_info = js_fun->shared();
1116    TagObject(js_fun->literals(), "(function literals)");
1117    SetInternalReference(js_fun, entry, "literals", js_fun->literals(),
1118                         JSFunction::kLiteralsOffset);
1119    TagObject(shared_info, "(shared function info)");
1120    SetInternalReference(js_fun, entry,
1121                         "shared", shared_info,
1122                         JSFunction::kSharedFunctionInfoOffset);
1123    TagObject(js_fun->context(), "(context)");
1124    SetInternalReference(js_fun, entry,
1125                         "context", js_fun->context(),
1126                         JSFunction::kContextOffset);
1127    // Ensure no new weak references appeared in JSFunction.
1128    STATIC_ASSERT(JSFunction::kCodeEntryOffset ==
1129                  JSFunction::kNonWeakFieldsEndOffset);
1130    STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
1131                  JSFunction::kNextFunctionLinkOffset);
1132    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset + kPointerSize
1133                 == JSFunction::kSize);
1134  } else if (obj->IsJSGlobalObject()) {
1135    JSGlobalObject* global_obj = JSGlobalObject::cast(obj);
1136    SetInternalReference(global_obj, entry, "native_context",
1137                         global_obj->native_context(),
1138                         JSGlobalObject::kNativeContextOffset);
1139    SetInternalReference(global_obj, entry, "global_proxy",
1140                         global_obj->global_proxy(),
1141                         JSGlobalObject::kGlobalProxyOffset);
1142    STATIC_ASSERT(JSGlobalObject::kSize - JSObject::kHeaderSize ==
1143                  2 * kPointerSize);
1144  } else if (obj->IsJSArrayBufferView()) {
1145    JSArrayBufferView* view = JSArrayBufferView::cast(obj);
1146    SetInternalReference(view, entry, "buffer", view->buffer(),
1147                         JSArrayBufferView::kBufferOffset);
1148  }
1149  TagObject(js_obj->properties(), "(object properties)");
1150  SetInternalReference(obj, entry,
1151                       "properties", js_obj->properties(),
1152                       JSObject::kPropertiesOffset);
1153  TagObject(js_obj->elements(), "(object elements)");
1154  SetInternalReference(obj, entry,
1155                       "elements", js_obj->elements(),
1156                       JSObject::kElementsOffset);
1157}
1158
1159
1160void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1161  if (string->IsConsString()) {
1162    ConsString* cs = ConsString::cast(string);
1163    SetInternalReference(cs, entry, "first", cs->first(),
1164                         ConsString::kFirstOffset);
1165    SetInternalReference(cs, entry, "second", cs->second(),
1166                         ConsString::kSecondOffset);
1167  } else if (string->IsSlicedString()) {
1168    SlicedString* ss = SlicedString::cast(string);
1169    SetInternalReference(ss, entry, "parent", ss->parent(),
1170                         SlicedString::kParentOffset);
1171  }
1172}
1173
1174
1175void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) {
1176  SetInternalReference(symbol, entry,
1177                       "name", symbol->name(),
1178                       Symbol::kNameOffset);
1179}
1180
1181
1182void V8HeapExplorer::ExtractJSCollectionReferences(int entry,
1183                                                   JSCollection* collection) {
1184  SetInternalReference(collection, entry, "table", collection->table(),
1185                       JSCollection::kTableOffset);
1186}
1187
1188void V8HeapExplorer::ExtractJSWeakCollectionReferences(int entry,
1189                                                       JSWeakCollection* obj) {
1190  if (obj->table()->IsHashTable()) {
1191    ObjectHashTable* table = ObjectHashTable::cast(obj->table());
1192    TagFixedArraySubType(table, JS_WEAK_COLLECTION_SUB_TYPE);
1193  }
1194  SetInternalReference(obj, entry, "table", obj->table(),
1195                       JSWeakCollection::kTableOffset);
1196}
1197
1198void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
1199  if (context == context->declaration_context()) {
1200    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
1201    // Add context allocated locals.
1202    int context_locals = scope_info->ContextLocalCount();
1203    for (int i = 0; i < context_locals; ++i) {
1204      String* local_name = scope_info->ContextLocalName(i);
1205      int idx = Context::MIN_CONTEXT_SLOTS + i;
1206      SetContextReference(context, entry, local_name, context->get(idx),
1207                          Context::OffsetOfElementAt(idx));
1208    }
1209    if (scope_info->HasFunctionName()) {
1210      String* name = scope_info->FunctionName();
1211      int idx = scope_info->FunctionContextSlotIndex(name);
1212      if (idx >= 0) {
1213        SetContextReference(context, entry, name, context->get(idx),
1214                            Context::OffsetOfElementAt(idx));
1215      }
1216    }
1217  }
1218
1219#define EXTRACT_CONTEXT_FIELD(index, type, name) \
1220  if (Context::index < Context::FIRST_WEAK_SLOT || \
1221      Context::index == Context::MAP_CACHE_INDEX) { \
1222    SetInternalReference(context, entry, #name, context->get(Context::index), \
1223        FixedArray::OffsetOfElementAt(Context::index)); \
1224  } else { \
1225    SetWeakReference(context, entry, #name, context->get(Context::index), \
1226        FixedArray::OffsetOfElementAt(Context::index)); \
1227  }
1228  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
1229  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
1230  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, HeapObject, extension);
1231  EXTRACT_CONTEXT_FIELD(NATIVE_CONTEXT_INDEX, Context, native_context);
1232  if (context->IsNativeContext()) {
1233    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1234    TagObject(context->embedder_data(), "(context data)");
1235    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD)
1236    EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
1237                          optimized_functions_list);
1238    EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
1239    EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
1240#undef EXTRACT_CONTEXT_FIELD
1241    STATIC_ASSERT(Context::OPTIMIZED_FUNCTIONS_LIST ==
1242                  Context::FIRST_WEAK_SLOT);
1243    STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
1244                  Context::NATIVE_CONTEXT_SLOTS);
1245    STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 4 ==
1246                  Context::NATIVE_CONTEXT_SLOTS);
1247  }
1248}
1249
1250
1251void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
1252  Object* raw_transitions_or_prototype_info = map->raw_transitions();
1253  if (TransitionArray::IsFullTransitionArray(
1254          raw_transitions_or_prototype_info)) {
1255    TransitionArray* transitions =
1256        TransitionArray::cast(raw_transitions_or_prototype_info);
1257    if (map->CanTransition() && transitions->HasPrototypeTransitions()) {
1258      TagObject(transitions->GetPrototypeTransitions(),
1259                "(prototype transitions)");
1260    }
1261
1262    TagObject(transitions, "(transition array)");
1263    SetInternalReference(map, entry, "transitions", transitions,
1264                         Map::kTransitionsOrPrototypeInfoOffset);
1265  } else if (TransitionArray::IsSimpleTransition(
1266                 raw_transitions_or_prototype_info)) {
1267    TagObject(raw_transitions_or_prototype_info, "(transition)");
1268    SetInternalReference(map, entry, "transition",
1269                         raw_transitions_or_prototype_info,
1270                         Map::kTransitionsOrPrototypeInfoOffset);
1271  } else if (map->is_prototype_map()) {
1272    TagObject(raw_transitions_or_prototype_info, "prototype_info");
1273    SetInternalReference(map, entry, "prototype_info",
1274                         raw_transitions_or_prototype_info,
1275                         Map::kTransitionsOrPrototypeInfoOffset);
1276  }
1277  DescriptorArray* descriptors = map->instance_descriptors();
1278  TagObject(descriptors, "(map descriptors)");
1279  SetInternalReference(map, entry, "descriptors", descriptors,
1280                       Map::kDescriptorsOffset);
1281  SetInternalReference(map, entry, "code_cache", map->code_cache(),
1282                       Map::kCodeCacheOffset);
1283  SetInternalReference(map, entry, "prototype", map->prototype(),
1284                       Map::kPrototypeOffset);
1285#if V8_DOUBLE_FIELDS_UNBOXING
1286  if (FLAG_unbox_double_fields) {
1287    SetInternalReference(map, entry, "layout_descriptor",
1288                         map->layout_descriptor(),
1289                         Map::kLayoutDescriptorOffset);
1290  }
1291#endif
1292  Object* constructor_or_backpointer = map->constructor_or_backpointer();
1293  if (constructor_or_backpointer->IsMap()) {
1294    TagObject(constructor_or_backpointer, "(back pointer)");
1295    SetInternalReference(map, entry, "back_pointer", constructor_or_backpointer,
1296                         Map::kConstructorOrBackPointerOffset);
1297  } else {
1298    SetInternalReference(map, entry, "constructor", constructor_or_backpointer,
1299                         Map::kConstructorOrBackPointerOffset);
1300  }
1301  TagObject(map->dependent_code(), "(dependent code)");
1302  SetInternalReference(map, entry, "dependent_code", map->dependent_code(),
1303                       Map::kDependentCodeOffset);
1304  TagObject(map->weak_cell_cache(), "(weak cell)");
1305  SetInternalReference(map, entry, "weak_cell_cache", map->weak_cell_cache(),
1306                       Map::kWeakCellCacheOffset);
1307}
1308
1309
1310void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1311    int entry, SharedFunctionInfo* shared) {
1312  HeapObject* obj = shared;
1313  String* shared_name = shared->DebugName();
1314  const char* name = NULL;
1315  if (shared_name != heap_->empty_string()) {
1316    name = names_->GetName(shared_name);
1317    TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
1318  } else {
1319    TagObject(shared->code(), names_->GetFormatted("(%s code)",
1320        Code::Kind2String(shared->code()->kind())));
1321  }
1322
1323  SetInternalReference(obj, entry,
1324                       "name", shared->name(),
1325                       SharedFunctionInfo::kNameOffset);
1326  SetInternalReference(obj, entry,
1327                       "code", shared->code(),
1328                       SharedFunctionInfo::kCodeOffset);
1329  TagObject(shared->scope_info(), "(function scope info)");
1330  SetInternalReference(obj, entry,
1331                       "scope_info", shared->scope_info(),
1332                       SharedFunctionInfo::kScopeInfoOffset);
1333  SetInternalReference(obj, entry,
1334                       "instance_class_name", shared->instance_class_name(),
1335                       SharedFunctionInfo::kInstanceClassNameOffset);
1336  SetInternalReference(obj, entry,
1337                       "script", shared->script(),
1338                       SharedFunctionInfo::kScriptOffset);
1339  const char* construct_stub_name = name ?
1340      names_->GetFormatted("(construct stub code for %s)", name) :
1341      "(construct stub code)";
1342  TagObject(shared->construct_stub(), construct_stub_name);
1343  SetInternalReference(obj, entry,
1344                       "construct_stub", shared->construct_stub(),
1345                       SharedFunctionInfo::kConstructStubOffset);
1346  SetInternalReference(obj, entry,
1347                       "function_data", shared->function_data(),
1348                       SharedFunctionInfo::kFunctionDataOffset);
1349  SetInternalReference(obj, entry,
1350                       "debug_info", shared->debug_info(),
1351                       SharedFunctionInfo::kDebugInfoOffset);
1352  SetInternalReference(obj, entry, "function_identifier",
1353                       shared->function_identifier(),
1354                       SharedFunctionInfo::kFunctionIdentifierOffset);
1355  SetInternalReference(obj, entry,
1356                       "optimized_code_map", shared->optimized_code_map(),
1357                       SharedFunctionInfo::kOptimizedCodeMapOffset);
1358  SetInternalReference(obj, entry, "feedback_metadata",
1359                       shared->feedback_metadata(),
1360                       SharedFunctionInfo::kFeedbackMetadataOffset);
1361}
1362
1363
1364void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1365  HeapObject* obj = script;
1366  SetInternalReference(obj, entry,
1367                       "source", script->source(),
1368                       Script::kSourceOffset);
1369  SetInternalReference(obj, entry,
1370                       "name", script->name(),
1371                       Script::kNameOffset);
1372  SetInternalReference(obj, entry,
1373                       "context_data", script->context_data(),
1374                       Script::kContextOffset);
1375  TagObject(script->line_ends(), "(script line ends)");
1376  SetInternalReference(obj, entry,
1377                       "line_ends", script->line_ends(),
1378                       Script::kLineEndsOffset);
1379}
1380
1381
1382void V8HeapExplorer::ExtractAccessorInfoReferences(
1383    int entry, AccessorInfo* accessor_info) {
1384  SetInternalReference(accessor_info, entry, "name", accessor_info->name(),
1385                       AccessorInfo::kNameOffset);
1386  SetInternalReference(accessor_info, entry, "expected_receiver_type",
1387                       accessor_info->expected_receiver_type(),
1388                       AccessorInfo::kExpectedReceiverTypeOffset);
1389  if (accessor_info->IsAccessorInfo()) {
1390    AccessorInfo* executable_accessor_info = AccessorInfo::cast(accessor_info);
1391    SetInternalReference(executable_accessor_info, entry, "getter",
1392                         executable_accessor_info->getter(),
1393                         AccessorInfo::kGetterOffset);
1394    SetInternalReference(executable_accessor_info, entry, "setter",
1395                         executable_accessor_info->setter(),
1396                         AccessorInfo::kSetterOffset);
1397    SetInternalReference(executable_accessor_info, entry, "data",
1398                         executable_accessor_info->data(),
1399                         AccessorInfo::kDataOffset);
1400  }
1401}
1402
1403
1404void V8HeapExplorer::ExtractAccessorPairReferences(
1405    int entry, AccessorPair* accessors) {
1406  SetInternalReference(accessors, entry, "getter", accessors->getter(),
1407                       AccessorPair::kGetterOffset);
1408  SetInternalReference(accessors, entry, "setter", accessors->setter(),
1409                       AccessorPair::kSetterOffset);
1410}
1411
1412
1413void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
1414  TagObject(code, names_->GetFormatted("(%s builtin)", name));
1415}
1416
1417
1418void V8HeapExplorer::TagCodeObject(Code* code) {
1419  if (code->kind() == Code::STUB) {
1420    TagObject(code, names_->GetFormatted(
1421                        "(%s code)",
1422                        CodeStub::MajorName(CodeStub::GetMajorKey(code))));
1423  }
1424}
1425
1426
1427void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
1428  TagCodeObject(code);
1429  TagObject(code->relocation_info(), "(code relocation info)");
1430  SetInternalReference(code, entry,
1431                       "relocation_info", code->relocation_info(),
1432                       Code::kRelocationInfoOffset);
1433  SetInternalReference(code, entry,
1434                       "handler_table", code->handler_table(),
1435                       Code::kHandlerTableOffset);
1436  TagObject(code->deoptimization_data(), "(code deopt data)");
1437  SetInternalReference(code, entry,
1438                       "deoptimization_data", code->deoptimization_data(),
1439                       Code::kDeoptimizationDataOffset);
1440  TagObject(code->source_position_table(), "(source position table)");
1441  SetInternalReference(code, entry, "source_position_table",
1442                       code->source_position_table(),
1443                       Code::kSourcePositionTableOffset);
1444  if (code->kind() == Code::FUNCTION) {
1445    SetInternalReference(code, entry, "type_feedback_info",
1446                         code->type_feedback_info(),
1447                         Code::kTypeFeedbackInfoOffset);
1448  }
1449  SetInternalReference(code, entry, "gc_metadata", code->gc_metadata(),
1450                       Code::kGCMetadataOffset);
1451}
1452
1453void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
1454  SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
1455}
1456
1457void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
1458  SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
1459}
1460
1461void V8HeapExplorer::ExtractWeakCellReferences(int entry, WeakCell* weak_cell) {
1462  TagObject(weak_cell, "(weak cell)");
1463  SetWeakReference(weak_cell, entry, "value", weak_cell->value(),
1464                   WeakCell::kValueOffset);
1465}
1466
1467void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
1468                                                   PropertyCell* cell) {
1469  SetInternalReference(cell, entry, "value", cell->value(),
1470                       PropertyCell::kValueOffset);
1471  TagObject(cell->dependent_code(), "(dependent code)");
1472  SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
1473                       PropertyCell::kDependentCodeOffset);
1474}
1475
1476
1477void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
1478                                                     AllocationSite* site) {
1479  SetInternalReference(site, entry, "transition_info", site->transition_info(),
1480                       AllocationSite::kTransitionInfoOffset);
1481  SetInternalReference(site, entry, "nested_site", site->nested_site(),
1482                       AllocationSite::kNestedSiteOffset);
1483  TagObject(site->dependent_code(), "(dependent code)");
1484  SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
1485                       AllocationSite::kDependentCodeOffset);
1486  // Do not visit weak_next as it is not visited by the StaticVisitor,
1487  // and we're not very interested in weak_next field here.
1488  STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
1489                AllocationSite::kPointerFieldsEndOffset);
1490}
1491
1492
1493class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
1494 public:
1495  JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
1496      : size_(size)
1497      , explorer_(explorer) {
1498  }
1499  virtual HeapEntry* AllocateEntry(HeapThing ptr) {
1500    return explorer_->AddEntry(
1501        static_cast<Address>(ptr),
1502        HeapEntry::kNative, "system / JSArrayBufferData", size_);
1503  }
1504 private:
1505  size_t size_;
1506  V8HeapExplorer* explorer_;
1507};
1508
1509
1510void V8HeapExplorer::ExtractJSArrayBufferReferences(
1511    int entry, JSArrayBuffer* buffer) {
1512  // Setup a reference to a native memory backing_store object.
1513  if (!buffer->backing_store())
1514    return;
1515  size_t data_size = NumberToSize(buffer->byte_length());
1516  JSArrayBufferDataEntryAllocator allocator(data_size, this);
1517  HeapEntry* data_entry =
1518      filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
1519  filler_->SetNamedReference(HeapGraphEdge::kInternal,
1520                             entry, "backing_store", data_entry);
1521}
1522
1523void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
1524  auto it = array_types_.find(array);
1525  if (it == array_types_.end()) {
1526    for (int i = 0, l = array->length(); i < l; ++i) {
1527      SetInternalReference(array, entry, i, array->get(i),
1528                           array->OffsetOfElementAt(i));
1529    }
1530    return;
1531  }
1532  switch (it->second) {
1533    case JS_WEAK_COLLECTION_SUB_TYPE:
1534      for (int i = 0, l = array->length(); i < l; ++i) {
1535        SetWeakReference(array, entry, i, array->get(i),
1536                         array->OffsetOfElementAt(i));
1537      }
1538      break;
1539
1540    // TODO(alph): Add special processing for other types of FixedArrays.
1541
1542    default:
1543      for (int i = 0, l = array->length(); i < l; ++i) {
1544        SetInternalReference(array, entry, i, array->get(i),
1545                             array->OffsetOfElementAt(i));
1546      }
1547      break;
1548  }
1549}
1550
1551void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
1552  Isolate* isolate = js_obj->GetIsolate();
1553  if (js_obj->HasFastProperties()) {
1554    DescriptorArray* descs = js_obj->map()->instance_descriptors();
1555    int real_size = js_obj->map()->NumberOfOwnDescriptors();
1556    for (int i = 0; i < real_size; i++) {
1557      PropertyDetails details = descs->GetDetails(i);
1558      switch (details.location()) {
1559        case kField: {
1560          Representation r = details.representation();
1561          if (r.IsSmi() || r.IsDouble()) break;
1562
1563          Name* k = descs->GetKey(i);
1564          FieldIndex field_index = FieldIndex::ForDescriptor(js_obj->map(), i);
1565          Object* value = js_obj->RawFastPropertyAt(field_index);
1566          int field_offset =
1567              field_index.is_inobject() ? field_index.offset() : -1;
1568
1569          SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry, k,
1570                                             value, NULL, field_offset);
1571          break;
1572        }
1573        case kDescriptor:
1574          SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
1575                                             descs->GetKey(i),
1576                                             descs->GetValue(i));
1577          break;
1578      }
1579    }
1580  } else if (js_obj->IsJSGlobalObject()) {
1581    // We assume that global objects can only have slow properties.
1582    GlobalDictionary* dictionary = js_obj->global_dictionary();
1583    int length = dictionary->Capacity();
1584    for (int i = 0; i < length; ++i) {
1585      Object* k = dictionary->KeyAt(i);
1586      if (dictionary->IsKey(isolate, k)) {
1587        DCHECK(dictionary->ValueAt(i)->IsPropertyCell());
1588        PropertyCell* cell = PropertyCell::cast(dictionary->ValueAt(i));
1589        Object* value = cell->value();
1590        PropertyDetails details = cell->property_details();
1591        SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
1592                                           Name::cast(k), value);
1593      }
1594    }
1595  } else {
1596    NameDictionary* dictionary = js_obj->property_dictionary();
1597    int length = dictionary->Capacity();
1598    for (int i = 0; i < length; ++i) {
1599      Object* k = dictionary->KeyAt(i);
1600      if (dictionary->IsKey(isolate, k)) {
1601        Object* value = dictionary->ValueAt(i);
1602        PropertyDetails details = dictionary->DetailsAt(i);
1603        SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
1604                                           Name::cast(k), value);
1605      }
1606    }
1607  }
1608}
1609
1610
1611void V8HeapExplorer::ExtractAccessorPairProperty(JSObject* js_obj, int entry,
1612                                                 Name* key,
1613                                                 Object* callback_obj,
1614                                                 int field_offset) {
1615  if (!callback_obj->IsAccessorPair()) return;
1616  AccessorPair* accessors = AccessorPair::cast(callback_obj);
1617  SetPropertyReference(js_obj, entry, key, accessors, NULL, field_offset);
1618  Object* getter = accessors->getter();
1619  if (!getter->IsOddball()) {
1620    SetPropertyReference(js_obj, entry, key, getter, "get %s");
1621  }
1622  Object* setter = accessors->setter();
1623  if (!setter->IsOddball()) {
1624    SetPropertyReference(js_obj, entry, key, setter, "set %s");
1625  }
1626}
1627
1628
1629void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1630  Isolate* isolate = js_obj->GetIsolate();
1631  if (js_obj->HasFastObjectElements()) {
1632    FixedArray* elements = FixedArray::cast(js_obj->elements());
1633    int length = js_obj->IsJSArray() ?
1634        Smi::cast(JSArray::cast(js_obj)->length())->value() :
1635        elements->length();
1636    for (int i = 0; i < length; ++i) {
1637      if (!elements->get(i)->IsTheHole(isolate)) {
1638        SetElementReference(js_obj, entry, i, elements->get(i));
1639      }
1640    }
1641  } else if (js_obj->HasDictionaryElements()) {
1642    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1643    int length = dictionary->Capacity();
1644    for (int i = 0; i < length; ++i) {
1645      Object* k = dictionary->KeyAt(i);
1646      if (dictionary->IsKey(isolate, k)) {
1647        DCHECK(k->IsNumber());
1648        uint32_t index = static_cast<uint32_t>(k->Number());
1649        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1650      }
1651    }
1652  }
1653}
1654
1655
1656void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1657  int length = js_obj->GetInternalFieldCount();
1658  for (int i = 0; i < length; ++i) {
1659    Object* o = js_obj->GetInternalField(i);
1660    SetInternalReference(
1661        js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1662  }
1663}
1664
1665
1666String* V8HeapExplorer::GetConstructorName(JSObject* object) {
1667  Isolate* isolate = object->GetIsolate();
1668  if (object->IsJSFunction()) return isolate->heap()->closure_string();
1669  DisallowHeapAllocation no_gc;
1670  HandleScope scope(isolate);
1671  return *JSReceiver::GetConstructorName(handle(object, isolate));
1672}
1673
1674
1675HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1676  if (!obj->IsHeapObject()) return NULL;
1677  return filler_->FindOrAddEntry(obj, this);
1678}
1679
1680
1681class RootsReferencesExtractor : public ObjectVisitor {
1682 private:
1683  struct IndexTag {
1684    IndexTag(int index, VisitorSynchronization::SyncTag tag)
1685        : index(index), tag(tag) { }
1686    int index;
1687    VisitorSynchronization::SyncTag tag;
1688  };
1689
1690 public:
1691  explicit RootsReferencesExtractor(Heap* heap)
1692      : collecting_all_references_(false),
1693        previous_reference_count_(0),
1694        heap_(heap) {
1695  }
1696
1697  void VisitPointers(Object** start, Object** end) override {
1698    if (collecting_all_references_) {
1699      for (Object** p = start; p < end; p++) all_references_.Add(*p);
1700    } else {
1701      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
1702    }
1703  }
1704
1705  void SetCollectingAllReferences() { collecting_all_references_ = true; }
1706
1707  void FillReferences(V8HeapExplorer* explorer) {
1708    DCHECK(strong_references_.length() <= all_references_.length());
1709    Builtins* builtins = heap_->isolate()->builtins();
1710    int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
1711    while (all_index < all_references_.length()) {
1712      bool is_strong = strong_index < strong_references_.length()
1713          && strong_references_[strong_index] == all_references_[all_index];
1714      explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
1715                                      !is_strong,
1716                                      all_references_[all_index]);
1717      if (reference_tags_[tags_index].tag ==
1718          VisitorSynchronization::kBuiltins) {
1719        DCHECK(all_references_[all_index]->IsCode());
1720        explorer->TagBuiltinCodeObject(
1721            Code::cast(all_references_[all_index]),
1722            builtins->name(builtin_index++));
1723      }
1724      ++all_index;
1725      if (is_strong) ++strong_index;
1726      if (reference_tags_[tags_index].index == all_index) ++tags_index;
1727    }
1728  }
1729
1730  void Synchronize(VisitorSynchronization::SyncTag tag) override {
1731    if (collecting_all_references_ &&
1732        previous_reference_count_ != all_references_.length()) {
1733      previous_reference_count_ = all_references_.length();
1734      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
1735    }
1736  }
1737
1738 private:
1739  bool collecting_all_references_;
1740  List<Object*> strong_references_;
1741  List<Object*> all_references_;
1742  int previous_reference_count_;
1743  List<IndexTag> reference_tags_;
1744  Heap* heap_;
1745};
1746
1747
1748bool V8HeapExplorer::IterateAndExtractReferences(
1749    SnapshotFiller* filler) {
1750  filler_ = filler;
1751
1752  // Create references to the synthetic roots.
1753  SetRootGcRootsReference();
1754  for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
1755    SetGcRootsReference(static_cast<VisitorSynchronization::SyncTag>(tag));
1756  }
1757
1758  // Make sure builtin code objects get their builtin tags
1759  // first. Otherwise a particular JSFunction object could set
1760  // its custom name to a generic builtin.
1761  RootsReferencesExtractor extractor(heap_);
1762  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1763  extractor.SetCollectingAllReferences();
1764  heap_->IterateRoots(&extractor, VISIT_ALL);
1765  extractor.FillReferences(this);
1766
1767  // We have to do two passes as sometimes FixedArrays are used
1768  // to weakly hold their items, and it's impossible to distinguish
1769  // between these cases without processing the array owner first.
1770  bool interrupted =
1771      IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() ||
1772      IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>();
1773
1774  if (interrupted) {
1775    filler_ = NULL;
1776    return false;
1777  }
1778
1779  filler_ = NULL;
1780  return progress_->ProgressReport(true);
1781}
1782
1783
1784template<V8HeapExplorer::ExtractReferencesMethod extractor>
1785bool V8HeapExplorer::IterateAndExtractSinglePass() {
1786  // Now iterate the whole heap.
1787  bool interrupted = false;
1788  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
1789  // Heap iteration with filtering must be finished in any case.
1790  for (HeapObject* obj = iterator.next();
1791       obj != NULL;
1792       obj = iterator.next(), progress_->ProgressStep()) {
1793    if (interrupted) continue;
1794
1795    size_t max_pointer = obj->Size() / kPointerSize;
1796    if (max_pointer > marks_.size()) {
1797      // Clear the current bits.
1798      std::vector<bool>().swap(marks_);
1799      // Reallocate to right size.
1800      marks_.resize(max_pointer, false);
1801    }
1802
1803    HeapEntry* heap_entry = GetEntry(obj);
1804    int entry = heap_entry->index();
1805    if ((this->*extractor)(entry, obj)) {
1806      SetInternalReference(obj, entry,
1807                           "map", obj->map(), HeapObject::kMapOffset);
1808      // Extract unvisited fields as hidden references and restore tags
1809      // of visited fields.
1810      IndexedReferencesExtractor refs_extractor(this, obj, entry);
1811      obj->Iterate(&refs_extractor);
1812    }
1813
1814    if (!progress_->ProgressReport(false)) interrupted = true;
1815  }
1816  return interrupted;
1817}
1818
1819
1820bool V8HeapExplorer::IsEssentialObject(Object* object) {
1821  return object->IsHeapObject() && !object->IsOddball() &&
1822         object != heap_->empty_byte_array() &&
1823         object != heap_->empty_fixed_array() &&
1824         object != heap_->empty_descriptor_array() &&
1825         object != heap_->empty_type_feedback_vector() &&
1826         object != heap_->fixed_array_map() && object != heap_->cell_map() &&
1827         object != heap_->global_property_cell_map() &&
1828         object != heap_->shared_function_info_map() &&
1829         object != heap_->free_space_map() &&
1830         object != heap_->one_pointer_filler_map() &&
1831         object != heap_->two_pointer_filler_map();
1832}
1833
1834bool V8HeapExplorer::IsEssentialHiddenReference(Object* parent,
1835                                                int field_offset) {
1836  if (parent->IsAllocationSite() &&
1837      field_offset == AllocationSite::kWeakNextOffset)
1838    return false;
1839  if (parent->IsJSFunction() &&
1840      field_offset == JSFunction::kNextFunctionLinkOffset)
1841    return false;
1842  if (parent->IsCode() && field_offset == Code::kNextCodeLinkOffset)
1843    return false;
1844  if (parent->IsContext() &&
1845      field_offset == Context::OffsetOfElementAt(Context::NEXT_CONTEXT_LINK))
1846    return false;
1847  if (parent->IsWeakCell() && field_offset == WeakCell::kNextOffset)
1848    return false;
1849  return true;
1850}
1851
1852void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
1853                                         int parent_entry,
1854                                         String* reference_name,
1855                                         Object* child_obj,
1856                                         int field_offset) {
1857  DCHECK(parent_entry == GetEntry(parent_obj)->index());
1858  HeapEntry* child_entry = GetEntry(child_obj);
1859  if (child_entry != NULL) {
1860    filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1861                               parent_entry,
1862                               names_->GetName(reference_name),
1863                               child_entry);
1864    MarkVisitedField(parent_obj, field_offset);
1865  }
1866}
1867
1868
1869void V8HeapExplorer::MarkVisitedField(HeapObject* obj, int offset) {
1870  if (offset < 0) return;
1871  int index = offset / kPointerSize;
1872  DCHECK(!marks_[index]);
1873  marks_[index] = true;
1874}
1875
1876
1877void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1878                                            int parent_entry,
1879                                            const char* reference_name,
1880                                            Object* child_obj) {
1881  DCHECK(parent_entry == GetEntry(parent_obj)->index());
1882  HeapEntry* child_entry = GetEntry(child_obj);
1883  if (child_entry != NULL) {
1884    filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1885                               parent_entry,
1886                               reference_name,
1887                               child_entry);
1888  }
1889}
1890
1891
1892void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1893                                         int parent_entry,
1894                                         int index,
1895                                         Object* child_obj) {
1896  DCHECK(parent_entry == GetEntry(parent_obj)->index());
1897  HeapEntry* child_entry = GetEntry(child_obj);
1898  if (child_entry != NULL) {
1899    filler_->SetIndexedReference(HeapGraphEdge::kElement,
1900                                 parent_entry,
1901                                 index,
1902                                 child_entry);
1903  }
1904}
1905
1906
1907void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1908                                          int parent_entry,
1909                                          const char* reference_name,
1910                                          Object* child_obj,
1911                                          int field_offset) {
1912  DCHECK(parent_entry == GetEntry(parent_obj)->index());
1913  HeapEntry* child_entry = GetEntry(child_obj);
1914  if (child_entry == NULL) return;
1915  if (IsEssentialObject(child_obj)) {
1916    filler_->SetNamedReference(HeapGraphEdge::kInternal,
1917                               parent_entry,
1918                               reference_name,
1919                               child_entry);
1920  }
1921  MarkVisitedField(parent_obj, field_offset);
1922}
1923
1924
1925void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1926                                          int parent_entry,
1927                                          int index,
1928                                          Object* child_obj,
1929                                          int field_offset) {
1930  DCHECK(parent_entry == GetEntry(parent_obj)->index());
1931  HeapEntry* child_entry = GetEntry(child_obj);
1932  if (child_entry == NULL) return;
1933  if (IsEssentialObject(child_obj)) {
1934    filler_->SetNamedReference(HeapGraphEdge::kInternal,
1935                               parent_entry,
1936                               names_->GetName(index),
1937                               child_entry);
1938  }
1939  MarkVisitedField(parent_obj, field_offset);
1940}
1941
1942void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1943                                        int parent_entry, int index,
1944                                        Object* child_obj, int field_offset) {
1945  DCHECK(parent_entry == GetEntry(parent_obj)->index());
1946  HeapEntry* child_entry = GetEntry(child_obj);
1947  if (child_entry != nullptr && IsEssentialObject(child_obj) &&
1948      IsEssentialHiddenReference(parent_obj, field_offset)) {
1949    filler_->SetIndexedReference(HeapGraphEdge::kHidden, parent_entry, index,
1950                                 child_entry);
1951  }
1952}
1953
1954
1955void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1956                                      int parent_entry,
1957                                      const char* reference_name,
1958                                      Object* child_obj,
1959                                      int field_offset) {
1960  DCHECK(parent_entry == GetEntry(parent_obj)->index());
1961  HeapEntry* child_entry = GetEntry(child_obj);
1962  if (child_entry == NULL) return;
1963  if (IsEssentialObject(child_obj)) {
1964    filler_->SetNamedReference(HeapGraphEdge::kWeak,
1965                               parent_entry,
1966                               reference_name,
1967                               child_entry);
1968  }
1969  MarkVisitedField(parent_obj, field_offset);
1970}
1971
1972
1973void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1974                                      int parent_entry,
1975                                      int index,
1976                                      Object* child_obj,
1977                                      int field_offset) {
1978  DCHECK(parent_entry == GetEntry(parent_obj)->index());
1979  HeapEntry* child_entry = GetEntry(child_obj);
1980  if (child_entry == NULL) return;
1981  if (IsEssentialObject(child_obj)) {
1982    filler_->SetNamedReference(HeapGraphEdge::kWeak,
1983                               parent_entry,
1984                               names_->GetFormatted("%d", index),
1985                               child_entry);
1986  }
1987  MarkVisitedField(parent_obj, field_offset);
1988}
1989
1990
1991void V8HeapExplorer::SetDataOrAccessorPropertyReference(
1992    PropertyKind kind, JSObject* parent_obj, int parent_entry,
1993    Name* reference_name, Object* child_obj, const char* name_format_string,
1994    int field_offset) {
1995  if (kind == kAccessor) {
1996    ExtractAccessorPairProperty(parent_obj, parent_entry, reference_name,
1997                                child_obj, field_offset);
1998  } else {
1999    SetPropertyReference(parent_obj, parent_entry, reference_name, child_obj,
2000                         name_format_string, field_offset);
2001  }
2002}
2003
2004
2005void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2006                                          int parent_entry,
2007                                          Name* reference_name,
2008                                          Object* child_obj,
2009                                          const char* name_format_string,
2010                                          int field_offset) {
2011  DCHECK(parent_entry == GetEntry(parent_obj)->index());
2012  HeapEntry* child_entry = GetEntry(child_obj);
2013  if (child_entry != NULL) {
2014    HeapGraphEdge::Type type =
2015        reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
2016            ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2017    const char* name = name_format_string != NULL && reference_name->IsString()
2018        ? names_->GetFormatted(
2019              name_format_string,
2020              String::cast(reference_name)->ToCString(
2021                  DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
2022        names_->GetName(reference_name);
2023
2024    filler_->SetNamedReference(type,
2025                               parent_entry,
2026                               name,
2027                               child_entry);
2028    MarkVisitedField(parent_obj, field_offset);
2029  }
2030}
2031
2032
2033void V8HeapExplorer::SetRootGcRootsReference() {
2034  filler_->SetIndexedAutoIndexReference(
2035      HeapGraphEdge::kElement,
2036      snapshot_->root()->index(),
2037      snapshot_->gc_roots());
2038}
2039
2040
2041void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2042  HeapEntry* child_entry = GetEntry(child_obj);
2043  DCHECK(child_entry != NULL);
2044  filler_->SetNamedAutoIndexReference(
2045      HeapGraphEdge::kShortcut,
2046      snapshot_->root()->index(),
2047      child_entry);
2048}
2049
2050
2051void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2052  filler_->SetIndexedAutoIndexReference(
2053      HeapGraphEdge::kElement,
2054      snapshot_->gc_roots()->index(),
2055      snapshot_->gc_subroot(tag));
2056}
2057
2058
2059void V8HeapExplorer::SetGcSubrootReference(
2060    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
2061  HeapEntry* child_entry = GetEntry(child_obj);
2062  if (child_entry != NULL) {
2063    const char* name = GetStrongGcSubrootName(child_obj);
2064    if (name != NULL) {
2065      filler_->SetNamedReference(
2066          HeapGraphEdge::kInternal,
2067          snapshot_->gc_subroot(tag)->index(),
2068          name,
2069          child_entry);
2070    } else {
2071      if (is_weak) {
2072        filler_->SetNamedAutoIndexReference(
2073            HeapGraphEdge::kWeak,
2074            snapshot_->gc_subroot(tag)->index(),
2075            child_entry);
2076      } else {
2077        filler_->SetIndexedAutoIndexReference(
2078            HeapGraphEdge::kElement,
2079            snapshot_->gc_subroot(tag)->index(),
2080            child_entry);
2081      }
2082    }
2083
2084    // Add a shortcut to JS global object reference at snapshot root.
2085    if (child_obj->IsNativeContext()) {
2086      Context* context = Context::cast(child_obj);
2087      JSGlobalObject* global = context->global_object();
2088      if (global->IsJSGlobalObject()) {
2089        bool is_debug_object = false;
2090        is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
2091        if (!is_debug_object && !user_roots_.Contains(global)) {
2092          user_roots_.Insert(global);
2093          SetUserGlobalReference(global);
2094        }
2095      }
2096    }
2097  }
2098}
2099
2100
2101const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
2102  if (strong_gc_subroot_names_.is_empty()) {
2103#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
2104#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
2105    STRONG_ROOT_LIST(ROOT_NAME)
2106#undef ROOT_NAME
2107#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
2108    STRUCT_LIST(STRUCT_MAP_NAME)
2109#undef STRUCT_MAP_NAME
2110#define STRING_NAME(name, str) NAME_ENTRY(name)
2111    INTERNALIZED_STRING_LIST(STRING_NAME)
2112#undef STRING_NAME
2113#define SYMBOL_NAME(name) NAME_ENTRY(name)
2114    PRIVATE_SYMBOL_LIST(SYMBOL_NAME)
2115#undef SYMBOL_NAME
2116#define SYMBOL_NAME(name, description) NAME_ENTRY(name)
2117    PUBLIC_SYMBOL_LIST(SYMBOL_NAME)
2118    WELL_KNOWN_SYMBOL_LIST(SYMBOL_NAME)
2119#undef SYMBOL_NAME
2120#undef NAME_ENTRY
2121    CHECK(!strong_gc_subroot_names_.is_empty());
2122  }
2123  return strong_gc_subroot_names_.GetTag(object);
2124}
2125
2126
2127void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2128  if (IsEssentialObject(obj)) {
2129    HeapEntry* entry = GetEntry(obj);
2130    if (entry->name()[0] == '\0') {
2131      entry->set_name(tag);
2132    }
2133  }
2134}
2135
2136void V8HeapExplorer::TagFixedArraySubType(const FixedArray* array,
2137                                          FixedArraySubInstanceType type) {
2138  DCHECK(array_types_.find(array) == array_types_.end());
2139  array_types_[array] = type;
2140}
2141
2142class GlobalObjectsEnumerator : public ObjectVisitor {
2143 public:
2144  void VisitPointers(Object** start, Object** end) override {
2145    for (Object** p = start; p < end; p++) {
2146      if ((*p)->IsNativeContext()) {
2147        Context* context = Context::cast(*p);
2148        JSObject* proxy = context->global_proxy();
2149        if (proxy->IsJSGlobalProxy()) {
2150          Object* global = proxy->map()->prototype();
2151          if (global->IsJSGlobalObject()) {
2152            objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2153          }
2154        }
2155      }
2156    }
2157  }
2158  int count() { return objects_.length(); }
2159  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2160
2161 private:
2162  List<Handle<JSGlobalObject> > objects_;
2163};
2164
2165
2166// Modifies heap. Must not be run during heap traversal.
2167void V8HeapExplorer::TagGlobalObjects() {
2168  Isolate* isolate = heap_->isolate();
2169  HandleScope scope(isolate);
2170  GlobalObjectsEnumerator enumerator;
2171  isolate->global_handles()->IterateAllRoots(&enumerator);
2172  const char** urls = NewArray<const char*>(enumerator.count());
2173  for (int i = 0, l = enumerator.count(); i < l; ++i) {
2174    if (global_object_name_resolver_) {
2175      HandleScope scope(isolate);
2176      Handle<JSGlobalObject> global_obj = enumerator.at(i);
2177      urls[i] = global_object_name_resolver_->GetName(
2178          Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
2179    } else {
2180      urls[i] = NULL;
2181    }
2182  }
2183
2184  DisallowHeapAllocation no_allocation;
2185  for (int i = 0, l = enumerator.count(); i < l; ++i) {
2186    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
2187  }
2188
2189  DeleteArray(urls);
2190}
2191
2192
2193class GlobalHandlesExtractor : public ObjectVisitor {
2194 public:
2195  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2196      : explorer_(explorer) {}
2197  ~GlobalHandlesExtractor() override {}
2198  void VisitPointers(Object** start, Object** end) override { UNREACHABLE(); }
2199  void VisitEmbedderReference(Object** p, uint16_t class_id) override {
2200    explorer_->VisitSubtreeWrapper(p, class_id);
2201  }
2202 private:
2203  NativeObjectsExplorer* explorer_;
2204};
2205
2206
2207class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2208 public:
2209  BasicHeapEntriesAllocator(
2210      HeapSnapshot* snapshot,
2211      HeapEntry::Type entries_type)
2212    : snapshot_(snapshot),
2213      names_(snapshot_->profiler()->names()),
2214      heap_object_map_(snapshot_->profiler()->heap_object_map()),
2215      entries_type_(entries_type) {
2216  }
2217  virtual HeapEntry* AllocateEntry(HeapThing ptr);
2218 private:
2219  HeapSnapshot* snapshot_;
2220  StringsStorage* names_;
2221  HeapObjectsMap* heap_object_map_;
2222  HeapEntry::Type entries_type_;
2223};
2224
2225
2226HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2227  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2228  intptr_t elements = info->GetElementCount();
2229  intptr_t size = info->GetSizeInBytes();
2230  const char* name = elements != -1
2231                         ? names_->GetFormatted("%s / %" V8PRIdPTR " entries",
2232                                                info->GetLabel(), elements)
2233                         : names_->GetCopy(info->GetLabel());
2234  return snapshot_->AddEntry(
2235      entries_type_,
2236      name,
2237      heap_object_map_->GenerateId(info),
2238      size != -1 ? static_cast<int>(size) : 0,
2239      0);
2240}
2241
2242
2243NativeObjectsExplorer::NativeObjectsExplorer(
2244    HeapSnapshot* snapshot,
2245    SnapshottingProgressReportingInterface* progress)
2246    : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
2247      snapshot_(snapshot),
2248      names_(snapshot_->profiler()->names()),
2249      embedder_queried_(false),
2250      objects_by_info_(RetainedInfosMatch),
2251      native_groups_(StringsMatch),
2252      filler_(NULL) {
2253  synthetic_entries_allocator_ =
2254      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2255  native_entries_allocator_ =
2256      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
2257}
2258
2259
2260NativeObjectsExplorer::~NativeObjectsExplorer() {
2261  for (base::HashMap::Entry* p = objects_by_info_.Start(); p != NULL;
2262       p = objects_by_info_.Next(p)) {
2263    v8::RetainedObjectInfo* info =
2264        reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2265    info->Dispose();
2266    List<HeapObject*>* objects =
2267        reinterpret_cast<List<HeapObject*>* >(p->value);
2268    delete objects;
2269  }
2270  for (base::HashMap::Entry* p = native_groups_.Start(); p != NULL;
2271       p = native_groups_.Next(p)) {
2272    v8::RetainedObjectInfo* info =
2273        reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2274    info->Dispose();
2275  }
2276  delete synthetic_entries_allocator_;
2277  delete native_entries_allocator_;
2278}
2279
2280
2281int NativeObjectsExplorer::EstimateObjectsCount() {
2282  FillRetainedObjects();
2283  return objects_by_info_.occupancy();
2284}
2285
2286
2287void NativeObjectsExplorer::FillRetainedObjects() {
2288  if (embedder_queried_) return;
2289  Isolate* isolate = isolate_;
2290  const GCType major_gc_type = kGCTypeMarkSweepCompact;
2291  // Record objects that are joined into ObjectGroups.
2292  isolate->heap()->CallGCPrologueCallbacks(
2293      major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
2294  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2295  for (int i = 0; i < groups->length(); ++i) {
2296    ObjectGroup* group = groups->at(i);
2297    if (group->info == NULL) continue;
2298    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
2299    for (size_t j = 0; j < group->length; ++j) {
2300      HeapObject* obj = HeapObject::cast(*group->objects[j]);
2301      list->Add(obj);
2302      in_groups_.Insert(obj);
2303    }
2304    group->info = NULL;  // Acquire info object ownership.
2305  }
2306  isolate->global_handles()->RemoveObjectGroups();
2307  isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
2308  // Record objects that are not in ObjectGroups, but have class ID.
2309  GlobalHandlesExtractor extractor(this);
2310  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2311  embedder_queried_ = true;
2312}
2313
2314
2315void NativeObjectsExplorer::FillImplicitReferences() {
2316  Isolate* isolate = isolate_;
2317  List<ImplicitRefGroup*>* groups =
2318      isolate->global_handles()->implicit_ref_groups();
2319  for (int i = 0; i < groups->length(); ++i) {
2320    ImplicitRefGroup* group = groups->at(i);
2321    HeapObject* parent = *group->parent;
2322    int parent_entry =
2323        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2324    DCHECK(parent_entry != HeapEntry::kNoEntry);
2325    Object*** children = group->children;
2326    for (size_t j = 0; j < group->length; ++j) {
2327      Object* child = *children[j];
2328      HeapEntry* child_entry =
2329          filler_->FindOrAddEntry(child, native_entries_allocator_);
2330      filler_->SetNamedReference(
2331          HeapGraphEdge::kInternal,
2332          parent_entry,
2333          "native",
2334          child_entry);
2335    }
2336  }
2337  isolate->global_handles()->RemoveImplicitRefGroups();
2338}
2339
2340List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2341    v8::RetainedObjectInfo* info) {
2342  base::HashMap::Entry* entry =
2343      objects_by_info_.LookupOrInsert(info, InfoHash(info));
2344  if (entry->value != NULL) {
2345    info->Dispose();
2346  } else {
2347    entry->value = new List<HeapObject*>(4);
2348  }
2349  return reinterpret_cast<List<HeapObject*>* >(entry->value);
2350}
2351
2352
2353bool NativeObjectsExplorer::IterateAndExtractReferences(
2354    SnapshotFiller* filler) {
2355  filler_ = filler;
2356  FillRetainedObjects();
2357  FillImplicitReferences();
2358  if (EstimateObjectsCount() > 0) {
2359    for (base::HashMap::Entry* p = objects_by_info_.Start(); p != NULL;
2360         p = objects_by_info_.Next(p)) {
2361      v8::RetainedObjectInfo* info =
2362          reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2363      SetNativeRootReference(info);
2364      List<HeapObject*>* objects =
2365          reinterpret_cast<List<HeapObject*>* >(p->value);
2366      for (int i = 0; i < objects->length(); ++i) {
2367        SetWrapperNativeReferences(objects->at(i), info);
2368      }
2369    }
2370    SetRootNativeRootsReference();
2371  }
2372  filler_ = NULL;
2373  return true;
2374}
2375
2376
2377class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2378 public:
2379  explicit NativeGroupRetainedObjectInfo(const char* label)
2380      : disposed_(false),
2381        hash_(reinterpret_cast<intptr_t>(label)),
2382        label_(label) {
2383  }
2384
2385  virtual ~NativeGroupRetainedObjectInfo() {}
2386  virtual void Dispose() {
2387    CHECK(!disposed_);
2388    disposed_ = true;
2389    delete this;
2390  }
2391  virtual bool IsEquivalent(RetainedObjectInfo* other) {
2392    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2393  }
2394  virtual intptr_t GetHash() { return hash_; }
2395  virtual const char* GetLabel() { return label_; }
2396
2397 private:
2398  bool disposed_;
2399  intptr_t hash_;
2400  const char* label_;
2401};
2402
2403
2404NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2405    const char* label) {
2406  const char* label_copy = names_->GetCopy(label);
2407  uint32_t hash = StringHasher::HashSequentialString(
2408      label_copy,
2409      static_cast<int>(strlen(label_copy)),
2410      isolate_->heap()->HashSeed());
2411  base::HashMap::Entry* entry =
2412      native_groups_.LookupOrInsert(const_cast<char*>(label_copy), hash);
2413  if (entry->value == NULL) {
2414    entry->value = new NativeGroupRetainedObjectInfo(label);
2415  }
2416  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2417}
2418
2419
2420void NativeObjectsExplorer::SetNativeRootReference(
2421    v8::RetainedObjectInfo* info) {
2422  HeapEntry* child_entry =
2423      filler_->FindOrAddEntry(info, native_entries_allocator_);
2424  DCHECK(child_entry != NULL);
2425  NativeGroupRetainedObjectInfo* group_info =
2426      FindOrAddGroupInfo(info->GetGroupLabel());
2427  HeapEntry* group_entry =
2428      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2429  // |FindOrAddEntry| can move and resize the entries backing store. Reload
2430  // potentially-stale pointer.
2431  child_entry = filler_->FindEntry(info);
2432  filler_->SetNamedAutoIndexReference(
2433      HeapGraphEdge::kInternal,
2434      group_entry->index(),
2435      child_entry);
2436}
2437
2438
2439void NativeObjectsExplorer::SetWrapperNativeReferences(
2440    HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2441  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2442  DCHECK(wrapper_entry != NULL);
2443  HeapEntry* info_entry =
2444      filler_->FindOrAddEntry(info, native_entries_allocator_);
2445  DCHECK(info_entry != NULL);
2446  filler_->SetNamedReference(HeapGraphEdge::kInternal,
2447                             wrapper_entry->index(),
2448                             "native",
2449                             info_entry);
2450  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2451                                        info_entry->index(),
2452                                        wrapper_entry);
2453}
2454
2455
2456void NativeObjectsExplorer::SetRootNativeRootsReference() {
2457  for (base::HashMap::Entry* entry = native_groups_.Start(); entry;
2458       entry = native_groups_.Next(entry)) {
2459    NativeGroupRetainedObjectInfo* group_info =
2460        static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2461    HeapEntry* group_entry =
2462        filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2463    DCHECK(group_entry != NULL);
2464    filler_->SetIndexedAutoIndexReference(
2465        HeapGraphEdge::kElement,
2466        snapshot_->root()->index(),
2467        group_entry);
2468  }
2469}
2470
2471
2472void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2473  if (in_groups_.Contains(*p)) return;
2474  Isolate* isolate = isolate_;
2475  v8::RetainedObjectInfo* info =
2476      isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2477  if (info == NULL) return;
2478  GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2479}
2480
2481
2482HeapSnapshotGenerator::HeapSnapshotGenerator(
2483    HeapSnapshot* snapshot,
2484    v8::ActivityControl* control,
2485    v8::HeapProfiler::ObjectNameResolver* resolver,
2486    Heap* heap)
2487    : snapshot_(snapshot),
2488      control_(control),
2489      v8_heap_explorer_(snapshot_, this, resolver),
2490      dom_explorer_(snapshot_, this),
2491      heap_(heap) {
2492}
2493
2494
2495bool HeapSnapshotGenerator::GenerateSnapshot() {
2496  v8_heap_explorer_.TagGlobalObjects();
2497
2498  // TODO(1562) Profiler assumes that any object that is in the heap after
2499  // full GC is reachable from the root when computing dominators.
2500  // This is not true for weakly reachable objects.
2501  // As a temporary solution we call GC twice.
2502  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
2503                           GarbageCollectionReason::kHeapProfiler);
2504  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
2505                           GarbageCollectionReason::kHeapProfiler);
2506
2507#ifdef VERIFY_HEAP
2508  Heap* debug_heap = heap_;
2509  if (FLAG_verify_heap) {
2510    debug_heap->Verify();
2511  }
2512#endif
2513
2514  SetProgressTotal(2);  // 2 passes.
2515
2516#ifdef VERIFY_HEAP
2517  if (FLAG_verify_heap) {
2518    debug_heap->Verify();
2519  }
2520#endif
2521
2522  snapshot_->AddSyntheticRootEntries();
2523
2524  if (!FillReferences()) return false;
2525
2526  snapshot_->FillChildren();
2527  snapshot_->RememberLastJSObjectId();
2528
2529  progress_counter_ = progress_total_;
2530  if (!ProgressReport(true)) return false;
2531  return true;
2532}
2533
2534
2535void HeapSnapshotGenerator::ProgressStep() {
2536  ++progress_counter_;
2537}
2538
2539
2540bool HeapSnapshotGenerator::ProgressReport(bool force) {
2541  const int kProgressReportGranularity = 10000;
2542  if (control_ != NULL
2543      && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2544      return
2545          control_->ReportProgressValue(progress_counter_, progress_total_) ==
2546          v8::ActivityControl::kContinue;
2547  }
2548  return true;
2549}
2550
2551
2552void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2553  if (control_ == NULL) return;
2554  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2555  progress_total_ = iterations_count * (
2556      v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2557      dom_explorer_.EstimateObjectsCount());
2558  progress_counter_ = 0;
2559}
2560
2561
2562bool HeapSnapshotGenerator::FillReferences() {
2563  SnapshotFiller filler(snapshot_, &entries_);
2564  return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2565      && dom_explorer_.IterateAndExtractReferences(&filler);
2566}
2567
2568
2569template<int bytes> struct MaxDecimalDigitsIn;
2570template<> struct MaxDecimalDigitsIn<4> {
2571  static const int kSigned = 11;
2572  static const int kUnsigned = 10;
2573};
2574template<> struct MaxDecimalDigitsIn<8> {
2575  static const int kSigned = 20;
2576  static const int kUnsigned = 20;
2577};
2578
2579
2580class OutputStreamWriter {
2581 public:
2582  explicit OutputStreamWriter(v8::OutputStream* stream)
2583      : stream_(stream),
2584        chunk_size_(stream->GetChunkSize()),
2585        chunk_(chunk_size_),
2586        chunk_pos_(0),
2587        aborted_(false) {
2588    DCHECK(chunk_size_ > 0);
2589  }
2590  bool aborted() { return aborted_; }
2591  void AddCharacter(char c) {
2592    DCHECK(c != '\0');
2593    DCHECK(chunk_pos_ < chunk_size_);
2594    chunk_[chunk_pos_++] = c;
2595    MaybeWriteChunk();
2596  }
2597  void AddString(const char* s) {
2598    AddSubstring(s, StrLength(s));
2599  }
2600  void AddSubstring(const char* s, int n) {
2601    if (n <= 0) return;
2602    DCHECK(static_cast<size_t>(n) <= strlen(s));
2603    const char* s_end = s + n;
2604    while (s < s_end) {
2605      int s_chunk_size =
2606          Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2607      DCHECK(s_chunk_size > 0);
2608      MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
2609      s += s_chunk_size;
2610      chunk_pos_ += s_chunk_size;
2611      MaybeWriteChunk();
2612    }
2613  }
2614  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
2615  void Finalize() {
2616    if (aborted_) return;
2617    DCHECK(chunk_pos_ < chunk_size_);
2618    if (chunk_pos_ != 0) {
2619      WriteChunk();
2620    }
2621    stream_->EndOfStream();
2622  }
2623
2624 private:
2625  template<typename T>
2626  void AddNumberImpl(T n, const char* format) {
2627    // Buffer for the longest value plus trailing \0
2628    static const int kMaxNumberSize =
2629        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
2630    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2631      int result = SNPrintF(
2632          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2633      DCHECK(result != -1);
2634      chunk_pos_ += result;
2635      MaybeWriteChunk();
2636    } else {
2637      EmbeddedVector<char, kMaxNumberSize> buffer;
2638      int result = SNPrintF(buffer, format, n);
2639      USE(result);
2640      DCHECK(result != -1);
2641      AddString(buffer.start());
2642    }
2643  }
2644  void MaybeWriteChunk() {
2645    DCHECK(chunk_pos_ <= chunk_size_);
2646    if (chunk_pos_ == chunk_size_) {
2647      WriteChunk();
2648    }
2649  }
2650  void WriteChunk() {
2651    if (aborted_) return;
2652    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
2653        v8::OutputStream::kAbort) aborted_ = true;
2654    chunk_pos_ = 0;
2655  }
2656
2657  v8::OutputStream* stream_;
2658  int chunk_size_;
2659  ScopedVector<char> chunk_;
2660  int chunk_pos_;
2661  bool aborted_;
2662};
2663
2664
2665// type, name|index, to_node.
2666const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2667// type, name, id, self_size, edge_count, trace_node_id.
2668const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2669
2670void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2671  if (AllocationTracker* allocation_tracker =
2672      snapshot_->profiler()->allocation_tracker()) {
2673    allocation_tracker->PrepareForSerialization();
2674  }
2675  DCHECK(writer_ == NULL);
2676  writer_ = new OutputStreamWriter(stream);
2677  SerializeImpl();
2678  delete writer_;
2679  writer_ = NULL;
2680}
2681
2682
2683void HeapSnapshotJSONSerializer::SerializeImpl() {
2684  DCHECK(0 == snapshot_->root()->index());
2685  writer_->AddCharacter('{');
2686  writer_->AddString("\"snapshot\":{");
2687  SerializeSnapshot();
2688  if (writer_->aborted()) return;
2689  writer_->AddString("},\n");
2690  writer_->AddString("\"nodes\":[");
2691  SerializeNodes();
2692  if (writer_->aborted()) return;
2693  writer_->AddString("],\n");
2694  writer_->AddString("\"edges\":[");
2695  SerializeEdges();
2696  if (writer_->aborted()) return;
2697  writer_->AddString("],\n");
2698
2699  writer_->AddString("\"trace_function_infos\":[");
2700  SerializeTraceNodeInfos();
2701  if (writer_->aborted()) return;
2702  writer_->AddString("],\n");
2703  writer_->AddString("\"trace_tree\":[");
2704  SerializeTraceTree();
2705  if (writer_->aborted()) return;
2706  writer_->AddString("],\n");
2707
2708  writer_->AddString("\"samples\":[");
2709  SerializeSamples();
2710  if (writer_->aborted()) return;
2711  writer_->AddString("],\n");
2712
2713  writer_->AddString("\"strings\":[");
2714  SerializeStrings();
2715  if (writer_->aborted()) return;
2716  writer_->AddCharacter(']');
2717  writer_->AddCharacter('}');
2718  writer_->Finalize();
2719}
2720
2721
2722int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2723  base::HashMap::Entry* cache_entry =
2724      strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s));
2725  if (cache_entry->value == NULL) {
2726    cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2727  }
2728  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2729}
2730
2731
2732namespace {
2733
2734template<size_t size> struct ToUnsigned;
2735
2736template<> struct ToUnsigned<4> {
2737  typedef uint32_t Type;
2738};
2739
2740template<> struct ToUnsigned<8> {
2741  typedef uint64_t Type;
2742};
2743
2744}  // namespace
2745
2746
2747template<typename T>
2748static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2749  STATIC_ASSERT(static_cast<T>(-1) > 0);  // Check that T is unsigned
2750  int number_of_digits = 0;
2751  T t = value;
2752  do {
2753    ++number_of_digits;
2754  } while (t /= 10);
2755
2756  buffer_pos += number_of_digits;
2757  int result = buffer_pos;
2758  do {
2759    int last_digit = static_cast<int>(value % 10);
2760    buffer[--buffer_pos] = '0' + last_digit;
2761    value /= 10;
2762  } while (value);
2763  return result;
2764}
2765
2766
2767template<typename T>
2768static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
2769  typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2770  STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
2771  return utoa_impl(unsigned_value, buffer, buffer_pos);
2772}
2773
2774
2775void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2776                                               bool first_edge) {
2777  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2778  static const int kBufferSize =
2779      MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
2780  EmbeddedVector<char, kBufferSize> buffer;
2781  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2782      || edge->type() == HeapGraphEdge::kHidden
2783      ? edge->index() : GetStringId(edge->name());
2784  int buffer_pos = 0;
2785  if (!first_edge) {
2786    buffer[buffer_pos++] = ',';
2787  }
2788  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2789  buffer[buffer_pos++] = ',';
2790  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2791  buffer[buffer_pos++] = ',';
2792  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2793  buffer[buffer_pos++] = '\n';
2794  buffer[buffer_pos++] = '\0';
2795  writer_->AddString(buffer.start());
2796}
2797
2798
2799void HeapSnapshotJSONSerializer::SerializeEdges() {
2800  List<HeapGraphEdge*>& edges = snapshot_->children();
2801  for (int i = 0; i < edges.length(); ++i) {
2802    DCHECK(i == 0 ||
2803           edges[i - 1]->from()->index() <= edges[i]->from()->index());
2804    SerializeEdge(edges[i], i == 0);
2805    if (writer_->aborted()) return;
2806  }
2807}
2808
2809
2810void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2811  // The buffer needs space for 4 unsigned ints, 1 size_t, 5 commas, \n and \0
2812  static const int kBufferSize =
2813      5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
2814      + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned  // NOLINT
2815      + 6 + 1 + 1;
2816  EmbeddedVector<char, kBufferSize> buffer;
2817  int buffer_pos = 0;
2818  if (entry_index(entry) != 0) {
2819    buffer[buffer_pos++] = ',';
2820  }
2821  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2822  buffer[buffer_pos++] = ',';
2823  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2824  buffer[buffer_pos++] = ',';
2825  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2826  buffer[buffer_pos++] = ',';
2827  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2828  buffer[buffer_pos++] = ',';
2829  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2830  buffer[buffer_pos++] = ',';
2831  buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2832  buffer[buffer_pos++] = '\n';
2833  buffer[buffer_pos++] = '\0';
2834  writer_->AddString(buffer.start());
2835}
2836
2837
2838void HeapSnapshotJSONSerializer::SerializeNodes() {
2839  List<HeapEntry>& entries = snapshot_->entries();
2840  for (int i = 0; i < entries.length(); ++i) {
2841    SerializeNode(&entries[i]);
2842    if (writer_->aborted()) return;
2843  }
2844}
2845
2846
2847void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2848  writer_->AddString("\"meta\":");
2849  // The object describing node serialization layout.
2850  // We use a set of macros to improve readability.
2851#define JSON_A(s) "[" s "]"
2852#define JSON_O(s) "{" s "}"
2853#define JSON_S(s) "\"" s "\""
2854  writer_->AddString(JSON_O(
2855    JSON_S("node_fields") ":" JSON_A(
2856        JSON_S("type") ","
2857        JSON_S("name") ","
2858        JSON_S("id") ","
2859        JSON_S("self_size") ","
2860        JSON_S("edge_count") ","
2861        JSON_S("trace_node_id")) ","
2862    JSON_S("node_types") ":" JSON_A(
2863        JSON_A(
2864            JSON_S("hidden") ","
2865            JSON_S("array") ","
2866            JSON_S("string") ","
2867            JSON_S("object") ","
2868            JSON_S("code") ","
2869            JSON_S("closure") ","
2870            JSON_S("regexp") ","
2871            JSON_S("number") ","
2872            JSON_S("native") ","
2873            JSON_S("synthetic") ","
2874            JSON_S("concatenated string") ","
2875            JSON_S("sliced string")) ","
2876        JSON_S("string") ","
2877        JSON_S("number") ","
2878        JSON_S("number") ","
2879        JSON_S("number") ","
2880        JSON_S("number") ","
2881        JSON_S("number")) ","
2882    JSON_S("edge_fields") ":" JSON_A(
2883        JSON_S("type") ","
2884        JSON_S("name_or_index") ","
2885        JSON_S("to_node")) ","
2886    JSON_S("edge_types") ":" JSON_A(
2887        JSON_A(
2888            JSON_S("context") ","
2889            JSON_S("element") ","
2890            JSON_S("property") ","
2891            JSON_S("internal") ","
2892            JSON_S("hidden") ","
2893            JSON_S("shortcut") ","
2894            JSON_S("weak")) ","
2895        JSON_S("string_or_number") ","
2896        JSON_S("node")) ","
2897    JSON_S("trace_function_info_fields") ":" JSON_A(
2898        JSON_S("function_id") ","
2899        JSON_S("name") ","
2900        JSON_S("script_name") ","
2901        JSON_S("script_id") ","
2902        JSON_S("line") ","
2903        JSON_S("column")) ","
2904    JSON_S("trace_node_fields") ":" JSON_A(
2905        JSON_S("id") ","
2906        JSON_S("function_info_index") ","
2907        JSON_S("count") ","
2908        JSON_S("size") ","
2909        JSON_S("children")) ","
2910    JSON_S("sample_fields") ":" JSON_A(
2911        JSON_S("timestamp_us") ","
2912        JSON_S("last_assigned_id"))));
2913#undef JSON_S
2914#undef JSON_O
2915#undef JSON_A
2916  writer_->AddString(",\"node_count\":");
2917  writer_->AddNumber(snapshot_->entries().length());
2918  writer_->AddString(",\"edge_count\":");
2919  writer_->AddNumber(snapshot_->edges().length());
2920  writer_->AddString(",\"trace_function_count\":");
2921  uint32_t count = 0;
2922  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2923  if (tracker) {
2924    count = tracker->function_info_list().length();
2925  }
2926  writer_->AddNumber(count);
2927}
2928
2929
2930static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2931  static const char hex_chars[] = "0123456789ABCDEF";
2932  w->AddString("\\u");
2933  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2934  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2935  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2936  w->AddCharacter(hex_chars[u & 0xf]);
2937}
2938
2939
2940void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2941  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2942  if (!tracker) return;
2943  AllocationTraceTree* traces = tracker->trace_tree();
2944  SerializeTraceNode(traces->root());
2945}
2946
2947
2948void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2949  // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2950  const int kBufferSize =
2951      4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
2952      + 4 + 1 + 1;
2953  EmbeddedVector<char, kBufferSize> buffer;
2954  int buffer_pos = 0;
2955  buffer_pos = utoa(node->id(), buffer, buffer_pos);
2956  buffer[buffer_pos++] = ',';
2957  buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
2958  buffer[buffer_pos++] = ',';
2959  buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
2960  buffer[buffer_pos++] = ',';
2961  buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
2962  buffer[buffer_pos++] = ',';
2963  buffer[buffer_pos++] = '[';
2964  buffer[buffer_pos++] = '\0';
2965  writer_->AddString(buffer.start());
2966
2967  Vector<AllocationTraceNode*> children = node->children();
2968  for (int i = 0; i < children.length(); i++) {
2969    if (i > 0) {
2970      writer_->AddCharacter(',');
2971    }
2972    SerializeTraceNode(children[i]);
2973  }
2974  writer_->AddCharacter(']');
2975}
2976
2977
2978// 0-based position is converted to 1-based during the serialization.
2979static int SerializePosition(int position, const Vector<char>& buffer,
2980                             int buffer_pos) {
2981  if (position == -1) {
2982    buffer[buffer_pos++] = '0';
2983  } else {
2984    DCHECK(position >= 0);
2985    buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
2986  }
2987  return buffer_pos;
2988}
2989
2990
2991void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
2992  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2993  if (!tracker) return;
2994  // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
2995  const int kBufferSize =
2996      6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
2997      + 6 + 1 + 1;
2998  EmbeddedVector<char, kBufferSize> buffer;
2999  const List<AllocationTracker::FunctionInfo*>& list =
3000      tracker->function_info_list();
3001  for (int i = 0; i < list.length(); i++) {
3002    AllocationTracker::FunctionInfo* info = list[i];
3003    int buffer_pos = 0;
3004    if (i > 0) {
3005      buffer[buffer_pos++] = ',';
3006    }
3007    buffer_pos = utoa(info->function_id, buffer, buffer_pos);
3008    buffer[buffer_pos++] = ',';
3009    buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
3010    buffer[buffer_pos++] = ',';
3011    buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
3012    buffer[buffer_pos++] = ',';
3013    // The cast is safe because script id is a non-negative Smi.
3014    buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
3015        buffer_pos);
3016    buffer[buffer_pos++] = ',';
3017    buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
3018    buffer[buffer_pos++] = ',';
3019    buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
3020    buffer[buffer_pos++] = '\n';
3021    buffer[buffer_pos++] = '\0';
3022    writer_->AddString(buffer.start());
3023  }
3024}
3025
3026
3027void HeapSnapshotJSONSerializer::SerializeSamples() {
3028  const List<HeapObjectsMap::TimeInterval>& samples =
3029      snapshot_->profiler()->heap_object_map()->samples();
3030  if (samples.is_empty()) return;
3031  base::TimeTicks start_time = samples[0].timestamp;
3032  // The buffer needs space for 2 unsigned ints, 2 commas, \n and \0
3033  const int kBufferSize = MaxDecimalDigitsIn<sizeof(
3034                              base::TimeDelta().InMicroseconds())>::kUnsigned +
3035                          MaxDecimalDigitsIn<sizeof(samples[0].id)>::kUnsigned +
3036                          2 + 1 + 1;
3037  EmbeddedVector<char, kBufferSize> buffer;
3038  for (int i = 0; i < samples.length(); i++) {
3039    HeapObjectsMap::TimeInterval& sample = samples[i];
3040    int buffer_pos = 0;
3041    if (i > 0) {
3042      buffer[buffer_pos++] = ',';
3043    }
3044    base::TimeDelta time_delta = sample.timestamp - start_time;
3045    buffer_pos = utoa(time_delta.InMicroseconds(), buffer, buffer_pos);
3046    buffer[buffer_pos++] = ',';
3047    buffer_pos = utoa(sample.last_assigned_id(), buffer, buffer_pos);
3048    buffer[buffer_pos++] = '\n';
3049    buffer[buffer_pos++] = '\0';
3050    writer_->AddString(buffer.start());
3051  }
3052}
3053
3054
3055void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
3056  writer_->AddCharacter('\n');
3057  writer_->AddCharacter('\"');
3058  for ( ; *s != '\0'; ++s) {
3059    switch (*s) {
3060      case '\b':
3061        writer_->AddString("\\b");
3062        continue;
3063      case '\f':
3064        writer_->AddString("\\f");
3065        continue;
3066      case '\n':
3067        writer_->AddString("\\n");
3068        continue;
3069      case '\r':
3070        writer_->AddString("\\r");
3071        continue;
3072      case '\t':
3073        writer_->AddString("\\t");
3074        continue;
3075      case '\"':
3076      case '\\':
3077        writer_->AddCharacter('\\');
3078        writer_->AddCharacter(*s);
3079        continue;
3080      default:
3081        if (*s > 31 && *s < 128) {
3082          writer_->AddCharacter(*s);
3083        } else if (*s <= 31) {
3084          // Special character with no dedicated literal.
3085          WriteUChar(writer_, *s);
3086        } else {
3087          // Convert UTF-8 into \u UTF-16 literal.
3088          size_t length = 1, cursor = 0;
3089          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
3090          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
3091          if (c != unibrow::Utf8::kBadChar) {
3092            WriteUChar(writer_, c);
3093            DCHECK(cursor != 0);
3094            s += cursor - 1;
3095          } else {
3096            writer_->AddCharacter('?');
3097          }
3098        }
3099    }
3100  }
3101  writer_->AddCharacter('\"');
3102}
3103
3104
3105void HeapSnapshotJSONSerializer::SerializeStrings() {
3106  ScopedVector<const unsigned char*> sorted_strings(
3107      strings_.occupancy() + 1);
3108  for (base::HashMap::Entry* entry = strings_.Start(); entry != NULL;
3109       entry = strings_.Next(entry)) {
3110    int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
3111    sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
3112  }
3113  writer_->AddString("\"<dummy>\"");
3114  for (int i = 1; i < sorted_strings.length(); ++i) {
3115    writer_->AddCharacter(',');
3116    SerializeString(sorted_strings[i]);
3117    if (writer_->aborted()) return;
3118  }
3119}
3120
3121
3122}  // namespace internal
3123}  // namespace v8
3124