profile-generator.h revision 3bec4d28b1f388dbc06a9c4276e1a03e86c52b04
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_PROFILE_GENERATOR_H_
#define V8_PROFILE_GENERATOR_H_

#ifdef ENABLE_LOGGING_AND_PROFILING

#include "hashmap.h"
#include "../include/v8-profiler.h"

namespace v8 {
namespace internal {

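// Assigns small integer ids to security token objects. Tokens are
// referenced through weak handles, and disposed tokens are flagged as
// removed.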
class TokenEnumerator {
 public:
  TokenEnumerator();
  ~TokenEnumerator();
  int GetTokenId(Object* token);

  static const int kNoSecurityToken = -1;
  static const int kInheritsSecurityToken = -2;

 private:
  static void TokenRemovedCallback(v8::Persistent<v8::Value> handle,
                                   void* parameter);
  void TokenRemoved(Object** token_location);

  List<Object**> token_locations_;
  List<bool> token_removed_;

  friend class TokenEnumeratorTester;

  DISALLOW_COPY_AND_ASSIGN(TokenEnumerator);
};


// Provides storage for strings allocated in the C++ heap, holding them
// forever even if they disappear from the JS heap or external storage.
class StringsStorage {
 public:
  StringsStorage();
  ~StringsStorage();

  const char* GetName(String* name);

 private:
  INLINE(static bool StringsMatch(void* key1, void* key2)) {
    return strcmp(reinterpret_cast<char*>(key1),
                  reinterpret_cast<char*>(key2)) == 0;
  }

  // Mapping of strings by String::Hash to const char* strings.
  HashMap names_;

  DISALLOW_COPY_AND_ASSIGN(StringsStorage);
};


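// Describes a single piece of code known to the profiler: its log event
// tag, name (with an optional prefix), resource (script) name, line
// number and security token id. Each entry gets a unique call uid.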
class CodeEntry {
 public:
  explicit INLINE(CodeEntry(int security_token_id));
  // CodeEntry doesn't own name strings, just references them.
  INLINE(CodeEntry(Logger::LogEventsAndTags tag,
                   const char* name_prefix,
                   const char* name,
                   const char* resource_name,
                   int line_number,
                   int security_token_id));

  INLINE(bool is_js_function() const) { return is_js_function_tag(tag_); }
  INLINE(const char* name_prefix() const) { return name_prefix_; }
  INLINE(bool has_name_prefix() const) { return name_prefix_[0] != '\0'; }
  INLINE(const char* name() const) { return name_; }
  INLINE(const char* resource_name() const) { return resource_name_; }
  INLINE(int line_number() const) { return line_number_; }
  INLINE(unsigned call_uid() const) { return call_uid_; }
  INLINE(int security_token_id() const) { return security_token_id_; }

  INLINE(static bool is_js_function_tag(Logger::LogEventsAndTags tag));

  void CopyData(const CodeEntry& source);

  static const char* kEmptyNamePrefix;

 private:
  unsigned call_uid_;
  Logger::LogEventsAndTags tag_;
  const char* name_prefix_;
  const char* name_;
  const char* resource_name_;
  int line_number_;
  int security_token_id_;

  static unsigned next_call_uid_;

  DISALLOW_COPY_AND_ASSIGN(CodeEntry);
};


class ProfileTree;

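// A node of a profile call tree. Holds self and total tick counts for a
// CodeEntry; children are kept both in a hash map (for lookup by entry)
// and in a list (for iteration).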
class ProfileNode {
 public:
  INLINE(ProfileNode(ProfileTree* tree, CodeEntry* entry));

  ProfileNode* FindChild(CodeEntry* entry);
  ProfileNode* FindOrAddChild(CodeEntry* entry);
  INLINE(void IncrementSelfTicks()) { ++self_ticks_; }
  INLINE(void IncreaseSelfTicks(unsigned amount)) { self_ticks_ += amount; }
  INLINE(void IncreaseTotalTicks(unsigned amount)) { total_ticks_ += amount; }

  INLINE(CodeEntry* entry() const) { return entry_; }
  INLINE(unsigned self_ticks() const) { return self_ticks_; }
  INLINE(unsigned total_ticks() const) { return total_ticks_; }
  INLINE(const List<ProfileNode*>* children() const) { return &children_list_; }
  double GetSelfMillis() const;
  double GetTotalMillis() const;

  void Print(int indent);

 private:
  INLINE(static bool CodeEntriesMatch(void* entry1, void* entry2)) {
    return entry1 == entry2;
  }

  INLINE(static uint32_t CodeEntryHash(CodeEntry* entry)) {
    return static_cast<int32_t>(reinterpret_cast<intptr_t>(entry));
  }

  ProfileTree* tree_;
  CodeEntry* entry_;
  unsigned total_ticks_;
  unsigned self_ticks_;
  // Mapping from CodeEntry* to ProfileNode*
  HashMap children_;
  List<ProfileNode*> children_list_;

  DISALLOW_COPY_AND_ASSIGN(ProfileNode);
};


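// A call tree built from sampled call paths. Paths can be added walking
// them either from the start or from the end, and a tree can be cloned
// with entries filtered by security token id.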
class ProfileTree {
 public:
  ProfileTree();
  ~ProfileTree();

  void AddPathFromEnd(const Vector<CodeEntry*>& path);
  void AddPathFromStart(const Vector<CodeEntry*>& path);
  void CalculateTotalTicks();
  void FilteredClone(ProfileTree* src, int security_token_id);

  double TicksToMillis(unsigned ticks) const {
    return ticks * ms_to_ticks_scale_;
  }
  ProfileNode* root() const { return root_; }
  void SetTickRatePerMs(double ticks_per_ms);

  void ShortPrint();
  void Print() {
    root_->Print(0);
  }

 private:
  template <typename Callback>
  void TraverseDepthFirst(Callback* callback);

  CodeEntry root_entry_;
  ProfileNode* root_;
  double ms_to_ticks_scale_;

  DISALLOW_COPY_AND_ASSIGN(ProfileTree);
};


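// A single CPU profile: a title, a uid, and two views of the recorded
// call paths, represented as a top-down and a bottom-up call tree.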
class CpuProfile {
 public:
  CpuProfile(const char* title, unsigned uid)
      : title_(title), uid_(uid) { }

  // Add pc -> ... -> main() call path to the profile.
  void AddPath(const Vector<CodeEntry*>& path);
  void CalculateTotalTicks();
  void SetActualSamplingRate(double actual_sampling_rate);
  CpuProfile* FilteredClone(int security_token_id);

  INLINE(const char* title() const) { return title_; }
  INLINE(unsigned uid() const) { return uid_; }
  INLINE(const ProfileTree* top_down() const) { return &top_down_; }
  INLINE(const ProfileTree* bottom_up() const) { return &bottom_up_; }

  void UpdateTicksScale();

  void ShortPrint();
  void Print();

 private:
  const char* title_;
  unsigned uid_;
  ProfileTree top_down_;
  ProfileTree bottom_up_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfile);
};


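// Maps addresses of generated code to CodeEntry objects. Code regions
// are kept in a splay tree keyed by start address, so the entry covering
// a given address can be found efficiently.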
class CodeMap {
 public:
  CodeMap() { }
  INLINE(void AddCode(Address addr, CodeEntry* entry, unsigned size));
  INLINE(void MoveCode(Address from, Address to));
  INLINE(void DeleteCode(Address addr));
  void AddAlias(Address start, CodeEntry* entry, Address code_start);
  CodeEntry* FindEntry(Address addr);

  void Print();

 private:
  struct CodeEntryInfo {
    CodeEntryInfo(CodeEntry* an_entry, unsigned a_size)
        : entry(an_entry), size(a_size) { }
    CodeEntry* entry;
    unsigned size;
  };

  struct CodeTreeConfig {
    typedef Address Key;
    typedef CodeEntryInfo Value;
    static const Key kNoKey;
    static const Value kNoValue;
    static int Compare(const Key& a, const Key& b) {
      return a < b ? -1 : (a > b ? 1 : 0);
    }
  };
  typedef SplayTree<CodeTreeConfig> CodeTree;

  class CodeTreePrinter {
   public:
    void Call(const Address& key, const CodeEntryInfo& value);
  };

  CodeTree tree_;

  DISALLOW_COPY_AND_ASSIGN(CodeMap);
};


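// Owns all CodeEntry objects and CpuProfiles. Finished profiles are
// grouped by security token, and function and resource names are
// interned in a StringsStorage.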
class CpuProfilesCollection {
 public:
  CpuProfilesCollection();
  ~CpuProfilesCollection();

  bool StartProfiling(const char* title, unsigned uid);
  bool StartProfiling(String* title, unsigned uid);
  CpuProfile* StopProfiling(int security_token_id,
                            const char* title,
                            double actual_sampling_rate);
  CpuProfile* StopProfiling(int security_token_id,
                            String* title,
                            double actual_sampling_rate);
  List<CpuProfile*>* Profiles(int security_token_id);
  const char* GetName(String* name) {
    return function_and_resource_names_.GetName(name);
  }
  CpuProfile* GetProfile(int security_token_id, unsigned uid);
  inline bool is_last_profile();

  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                          String* name, String* resource_name, int line_number);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, const char* name);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                          const char* name_prefix, String* name);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, int args_count);
  CodeEntry* NewCodeEntry(int security_token_id);

  // Called from profile generator thread.
  void AddPathToCurrentProfiles(const Vector<CodeEntry*>& path);

 private:
  INLINE(const char* GetFunctionName(String* name));
  INLINE(const char* GetFunctionName(const char* name));
  const char* GetName(int args_count);
  List<CpuProfile*>* GetProfilesList(int security_token_id);
  int TokenToIndex(int security_token_id);

  INLINE(static bool UidsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  StringsStorage function_and_resource_names_;
  // Mapping from args_count (int) to char* strings.
  List<char*> args_count_names_;
  List<CodeEntry*> code_entries_;
  List<List<CpuProfile*>* > profiles_by_token_;
  // Mapping from profiles' uids to indexes in the second nested list
  // of profiles_by_token_.
  HashMap profiles_uids_;

  // Accessed by VM thread and profile generator thread.
  List<CpuProfile*> current_profiles_;
  Semaphore* current_profiles_semaphore_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfilesCollection);
};


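// Estimates the effective sampling rate (in ticks per millisecond) by
// periodically comparing the observed tick count against wall-clock
// time. The result is published through an atomic word so it can be
// read from another thread.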
class SampleRateCalculator {
 public:
  SampleRateCalculator()
      : result_(Logger::kSamplingIntervalMs * kResultScale),
        ticks_per_ms_(Logger::kSamplingIntervalMs),
        measurements_count_(0),
        wall_time_query_countdown_(1) {
  }

  double ticks_per_ms() {
    return result_ / static_cast<double>(kResultScale);
  }
  void Tick();
  void UpdateMeasurements(double current_time);

  // Instead of querying the current wall time on each tick,
  // we use this constant to control the query interval.
  static const unsigned kWallTimeQueryIntervalMs = 100;

 private:
  // As the result needs to be accessed from a different thread, we
  // use a type that guarantees atomic writes to memory.  There should
  // be <= 1000 ticks per second, so storing a value on the order of
  // 10 ** 5 provides enough precision while staying clear of a
  // potential overflow.
  static const int kResultScale = 100000;

  AtomicWord result_;
  // All other fields are accessed only from the sampler thread.
  double ticks_per_ms_;
  unsigned measurements_count_;
  unsigned wall_time_query_countdown_;
  double last_wall_time_;

  DISALLOW_COPY_AND_ASSIGN(SampleRateCalculator);
};


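// Translates tick samples into profile call paths: sampled addresses
// are resolved through the code map, and the resulting paths are added
// to all currently active profiles.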
class ProfileGenerator {
 public:
  explicit ProfileGenerator(CpuProfilesCollection* profiles);

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 String* name,
                                 String* resource_name,
                                 int line_number)) {
    return profiles_->NewCodeEntry(tag, name, resource_name, line_number);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 const char* name)) {
    return profiles_->NewCodeEntry(tag, name);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 const char* name_prefix,
                                 String* name)) {
    return profiles_->NewCodeEntry(tag, name_prefix, name);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 int args_count)) {
    return profiles_->NewCodeEntry(tag, args_count);
  }

  INLINE(CodeEntry* NewCodeEntry(int security_token_id)) {
    return profiles_->NewCodeEntry(security_token_id);
  }

  void RecordTickSample(const TickSample& sample);

  INLINE(CodeMap* code_map()) { return &code_map_; }

  INLINE(void Tick()) { sample_rate_calc_.Tick(); }
  INLINE(double actual_sampling_rate()) {
    return sample_rate_calc_.ticks_per_ms();
  }

  static const char* kAnonymousFunctionName;
  static const char* kProgramEntryName;
  static const char* kGarbageCollectorEntryName;

 private:
  INLINE(CodeEntry* EntryForVMState(StateTag tag));

  CpuProfilesCollection* profiles_;
  CodeMap code_map_;
  CodeEntry* program_entry_;
  CodeEntry* gc_entry_;
  SampleRateCalculator sample_rate_calc_;

  DISALLOW_COPY_AND_ASSIGN(ProfileGenerator);
};


class HeapSnapshot;
class HeapEntry;


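// An edge of the heap graph: a typed reference from one HeapEntry to
// another, carrying either a name (context variable, property, internal)
// or an element index.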
class HeapGraphEdge {
 public:
  enum Type {
    CONTEXT_VARIABLE = v8::HeapGraphEdge::CONTEXT_VARIABLE,
    ELEMENT = v8::HeapGraphEdge::ELEMENT,
    PROPERTY = v8::HeapGraphEdge::PROPERTY,
    INTERNAL = v8::HeapGraphEdge::INTERNAL
  };

  HeapGraphEdge(Type type, const char* name, HeapEntry* from, HeapEntry* to);
  HeapGraphEdge(int index, HeapEntry* from, HeapEntry* to);

  Type type() const { return type_; }
  int index() const {
    ASSERT(type_ == ELEMENT);
    return index_;
  }
  const char* name() const {
    ASSERT(type_ == CONTEXT_VARIABLE || type_ == PROPERTY || type_ == INTERNAL);
    return name_;
  }
  HeapEntry* from() const { return from_; }
  HeapEntry* to() const { return to_; }

 private:
  Type type_;
  union {
    int index_;
    const char* name_;
  };
  HeapEntry* from_;
  HeapEntry* to_;

  DISALLOW_COPY_AND_ASSIGN(HeapGraphEdge);
};


class HeapGraphPath;
class CachedHeapGraphPath;

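// A node of the heap graph. Describes a heap object's type, name, id and
// self size, and links to its outgoing references (children) and incoming
// references (retainers). Retaining paths and aggregated sizes are
// computed lazily and cached.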
class HeapEntry {
 public:
  enum Type {
    INTERNAL = v8::HeapGraphNode::INTERNAL,
    ARRAY = v8::HeapGraphNode::ARRAY,
    STRING = v8::HeapGraphNode::STRING,
    OBJECT = v8::HeapGraphNode::OBJECT,
    CODE = v8::HeapGraphNode::CODE,
    CLOSURE = v8::HeapGraphNode::CLOSURE
  };

  explicit HeapEntry(HeapSnapshot* snapshot)
      : snapshot_(snapshot),
        visited_(false),
        type_(INTERNAL),
        name_(""),
        id_(0),
        next_auto_index_(0),
        self_size_(0),
        security_token_id_(TokenEnumerator::kNoSecurityToken),
        children_(1),
        retainers_(0),
        retaining_paths_(0),
        total_size_(kUnknownSize),
        non_shared_total_size_(kUnknownSize),
        painted_(kUnpainted) { }
  HeapEntry(HeapSnapshot* snapshot,
            Type type,
            const char* name,
            uint64_t id,
            int self_size,
            int security_token_id)
      : snapshot_(snapshot),
        visited_(false),
        type_(type),
        name_(name),
        id_(id),
        next_auto_index_(1),
        self_size_(self_size),
        security_token_id_(security_token_id),
        children_(4),
        retainers_(4),
        retaining_paths_(4),
        total_size_(kUnknownSize),
        non_shared_total_size_(kUnknownSize),
        painted_(kUnpainted) { }
  ~HeapEntry();

  bool visited() const { return visited_; }
  Type type() const { return type_; }
  const char* name() const { return name_; }
  uint64_t id() const { return id_; }
  int self_size() const { return self_size_; }
  int security_token_id() const { return security_token_id_; }
  bool painted_reachable() { return painted_ == kPaintReachable; }
  bool not_painted_reachable_from_others() {
    return painted_ != kPaintReachableFromOthers;
  }
  const List<HeapGraphEdge*>* children() const { return &children_; }
  const List<HeapGraphEdge*>* retainers() const { return &retainers_; }
  const List<HeapGraphPath*>* GetRetainingPaths();

  template<class Visitor>
  void ApplyAndPaintAllReachable(Visitor* visitor);

  void ClearPaint() { painted_ = kUnpainted; }
  void CutEdges();
  void MarkAsVisited() { visited_ = true; }
  void PaintAllReachable();
  void PaintReachable() {
    ASSERT(painted_ == kUnpainted);
    painted_ = kPaintReachable;
  }
  void PaintReachableFromOthers() { painted_ = kPaintReachableFromOthers; }
  void SetClosureReference(const char* name, HeapEntry* entry);
  void SetElementReference(int index, HeapEntry* entry);
  void SetInternalReference(const char* name, HeapEntry* entry);
  void SetPropertyReference(const char* name, HeapEntry* entry);
  void SetAutoIndexReference(HeapEntry* entry);
  void SetUnidirAutoIndexReference(HeapEntry* entry);

  int TotalSize();
  int NonSharedTotalSize();

  void Print(int max_depth, int indent);

 private:
  void AddEdge(HeapGraphEdge* edge);
  int CalculateTotalSize();
  int CalculateNonSharedTotalSize();
  void FindRetainingPaths(HeapEntry* node, CachedHeapGraphPath* prev_path);
  void RemoveChild(HeapGraphEdge* edge);
  void RemoveRetainer(HeapGraphEdge* edge);

  const char* TypeAsString();

  HeapSnapshot* snapshot_;
  bool visited_;
  Type type_;
  const char* name_;
  uint64_t id_;
  int next_auto_index_;
  int self_size_;
  int security_token_id_;
  List<HeapGraphEdge*> children_;
  List<HeapGraphEdge*> retainers_;
  List<HeapGraphPath*> retaining_paths_;
  int total_size_;
  int non_shared_total_size_;
  int painted_;

  static const int kUnknownSize = -1;
  static const int kUnpainted = 0;
  static const int kPaintReachable = 1;
  static const int kPaintReachableFromOthers = 2;

  DISALLOW_IMPLICIT_CONSTRUCTORS(HeapEntry);
};


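// A retaining path: a sequence of heap graph edges leading to an entry.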
class HeapGraphPath {
 public:
  HeapGraphPath()
      : path_(8) { }
  explicit HeapGraphPath(const List<HeapGraphEdge*>& path);

  void Add(HeapGraphEdge* edge) { path_.Add(edge); }
  void Set(int index, HeapGraphEdge* edge) { path_[index] = edge; }
  const List<HeapGraphEdge*>* path() const { return &path_; }

  void Print();

 private:
  List<HeapGraphEdge*> path_;

  DISALLOW_COPY_AND_ASSIGN(HeapGraphPath);
};


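// Maps HeapObject pointers to the HeapEntry objects describing them.
// Several heap objects may be aliased to one entry; aliases are marked
// by tagging the stored pointer with kAliasTag.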
class HeapEntriesMap {
 public:
  HeapEntriesMap();
  ~HeapEntriesMap();

  void Alias(HeapObject* object, HeapEntry* entry);
  void Apply(void (HeapEntry::*Func)(void));
  template<class Visitor>
  void Apply(Visitor* visitor);
  HeapEntry* Map(HeapObject* object);
  void Pair(HeapObject* object, HeapEntry* entry);

  uint32_t capacity() { return entries_.capacity(); }

 private:
  INLINE(uint32_t Hash(HeapObject* object)) {
    return static_cast<uint32_t>(reinterpret_cast<intptr_t>(object));
  }
  INLINE(static bool HeapObjectsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }
  INLINE(bool IsAlias(void* ptr)) {
    return reinterpret_cast<intptr_t>(ptr) & kAliasTag;
  }

  static const intptr_t kAliasTag = 1;

  HashMap entries_;

  DISALLOW_COPY_AND_ASSIGN(HeapEntriesMap);
};


class HeapSnapshotsCollection;
class HeapSnapshotsDiff;

// HeapSnapshot represents a single heap snapshot. It is stored in
// HeapSnapshotsCollection, which is also a factory for
// HeapSnapshots. All HeapSnapshots share strings copied from the JS heap,
// so they can be returned even if their originals have been collected.
// HeapSnapshotGenerator fills in a HeapSnapshot.
class HeapSnapshot {
 public:
  HeapSnapshot(HeapSnapshotsCollection* collection,
               const char* title,
               unsigned uid);
  ~HeapSnapshot();
  void ClearPaint();
  void CutObjectsFromForeignSecurityContexts();
  HeapEntry* GetEntry(Object* object);
  void SetClosureReference(
      HeapEntry* parent, String* reference_name, Object* child);
  void SetElementReference(HeapEntry* parent, int index, Object* child);
  void SetInternalReference(
      HeapEntry* parent, const char* reference_name, Object* child);
  void SetPropertyReference(
      HeapEntry* parent, String* reference_name, Object* child);

  INLINE(const char* title() const) { return title_; }
  INLINE(unsigned uid() const) { return uid_; }
  const HeapEntry* const_root() const { return &root_; }
  HeapEntry* root() { return &root_; }
  template<class Visitor>
  void IterateEntries(Visitor* visitor) { entries_.Apply(visitor); }
  List<HeapEntry*>* GetSortedEntriesList();
  HeapSnapshotsDiff* CompareWith(HeapSnapshot* snapshot);

  void Print(int max_depth);

 private:
  HeapEntry* AddEntry(HeapObject* object, HeapEntry::Type type) {
    return AddEntry(object, type, "");
  }
  HeapEntry* AddEntry(
      HeapObject* object, HeapEntry::Type type, const char* name);
  void AddEntryAlias(HeapObject* object, HeapEntry* entry) {
    entries_.Alias(object, entry);
  }
  HeapEntry* FindEntry(HeapObject* object) {
    return entries_.Map(object);
  }
  int GetGlobalSecurityToken();
  int GetObjectSecurityToken(HeapObject* obj);
  static int GetObjectSize(HeapObject* obj);
  static int CalculateNetworkSize(JSObject* obj);

  HeapSnapshotsCollection* collection_;
  const char* title_;
  unsigned uid_;
  HeapEntry root_;
  // Mapping from HeapObject* pointers to HeapEntry* pointers.
  HeapEntriesMap entries_;
  // Entries sorted by id.
  List<HeapEntry*>* sorted_entries_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshot);
};


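// Assigns persistent uids to heap objects and keeps the address-to-uid
// mapping up to date as objects move. Entries that were not accessed
// while generating a snapshot can be dropped afterwards.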
class HeapObjectsMap {
 public:
  HeapObjectsMap();
  ~HeapObjectsMap();

  void SnapshotGenerationFinished();
  uint64_t FindObject(Address addr);
  void MoveObject(Address from, Address to);

 private:
  struct EntryInfo {
    explicit EntryInfo(uint64_t id) : id(id), accessed(true) { }
    EntryInfo(uint64_t id, bool accessed) : id(id), accessed(accessed) { }
    uint64_t id;
    bool accessed;
  };

  void AddEntry(Address addr, uint64_t id);
  uint64_t FindEntry(Address addr);
  void RemoveDeadEntries();

  static bool AddressesMatch(void* key1, void* key2) {
    return key1 == key2;
  }

  static uint32_t AddressHash(Address addr) {
    return static_cast<int32_t>(reinterpret_cast<intptr_t>(addr));
  }

  bool initial_fill_mode_;
  uint64_t next_id_;
  HashMap entries_map_;
  List<EntryInfo>* entries_;

  DISALLOW_COPY_AND_ASSIGN(HeapObjectsMap);
};


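// The result of comparing two heap snapshots: two artificial root entries
// whose children are the entries added in the second snapshot and the
// entries deleted from the first one.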
class HeapSnapshotsDiff {
 public:
  HeapSnapshotsDiff(HeapSnapshot* snapshot1, HeapSnapshot* snapshot2)
      : snapshot1_(snapshot1),
        snapshot2_(snapshot2),
        additions_root_(new HeapEntry(snapshot2)),
        deletions_root_(new HeapEntry(snapshot1)) { }

  ~HeapSnapshotsDiff() {
    delete deletions_root_;
    delete additions_root_;
  }

  void AddAddedEntry(HeapEntry* entry) {
    additions_root_->SetUnidirAutoIndexReference(entry);
  }

  void AddDeletedEntry(HeapEntry* entry) {
    deletions_root_->SetUnidirAutoIndexReference(entry);
  }

  const HeapEntry* additions_root() const { return additions_root_; }
  const HeapEntry* deletions_root() const { return deletions_root_; }

 private:
  HeapSnapshot* snapshot1_;
  HeapSnapshot* snapshot2_;
  HeapEntry* additions_root_;
  HeapEntry* deletions_root_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsDiff);
};


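// Compares pairs of heap snapshots and keeps the produced diffs alive.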
class HeapSnapshotsComparator {
 public:
  HeapSnapshotsComparator() { }
  ~HeapSnapshotsComparator();
  HeapSnapshotsDiff* Compare(HeapSnapshot* snapshot1, HeapSnapshot* snapshot2);
 private:
  List<HeapSnapshotsDiff*> diffs_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsComparator);
};


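// Owns all taken heap snapshots, interns the strings they use, and
// tracks heap object ids and object moves while snapshots are being
// taken.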
class HeapSnapshotsCollection {
 public:
  HeapSnapshotsCollection();
  ~HeapSnapshotsCollection();

  bool is_tracking_objects() { return is_tracking_objects_; }

  HeapSnapshot* NewSnapshot(const char* name, unsigned uid);
  void SnapshotGenerationFinished() { ids_.SnapshotGenerationFinished(); }
  List<HeapSnapshot*>* snapshots() { return &snapshots_; }
  HeapSnapshot* GetSnapshot(unsigned uid);

  const char* GetName(String* name) { return names_.GetName(name); }

  TokenEnumerator* token_enumerator() { return token_enumerator_; }

  uint64_t GetObjectId(Address addr) { return ids_.FindObject(addr); }
  void ObjectMoveEvent(Address from, Address to) { ids_.MoveObject(from, to); }

  HeapSnapshotsDiff* CompareSnapshots(HeapSnapshot* snapshot1,
                                      HeapSnapshot* snapshot2);

 private:
  INLINE(static bool HeapSnapshotsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  bool is_tracking_objects_;  // Whether tracking object moves is needed.
  List<HeapSnapshot*> snapshots_;
  // Mapping from snapshots' uids to HeapSnapshot* pointers.
  HashMap snapshots_uids_;
  StringsStorage names_;
  TokenEnumerator* token_enumerator_;
  // Mapping from HeapObject addresses to objects' uids.
  HeapObjectsMap ids_;
  HeapSnapshotsComparator comparator_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsCollection);
};


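// Fills in a HeapSnapshot by walking the heap and extracting closure,
// property and element references from the objects it encounters.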
class HeapSnapshotGenerator {
 public:
  explicit HeapSnapshotGenerator(HeapSnapshot* snapshot);
  void GenerateSnapshot();

 private:
  void ExtractReferences(HeapObject* obj);
  void ExtractClosureReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractPropertyReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractElementReferences(JSObject* js_obj, HeapEntry* entry);

  HeapSnapshot* snapshot_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotGenerator);
};

} }  // namespace v8::internal

#endif  // ENABLE_LOGGING_AND_PROFILING

#endif  // V8_PROFILE_GENERATOR_H_