profile-generator.h revision b0fe1620dcb4135ac3ab2d66ff93072373911299
1// Copyright 2010 the V8 project authors. All rights reserved. 2// Redistribution and use in source and binary forms, with or without 3// modification, are permitted provided that the following conditions are 4// met: 5// 6// * Redistributions of source code must retain the above copyright 7// notice, this list of conditions and the following disclaimer. 8// * Redistributions in binary form must reproduce the above 9// copyright notice, this list of conditions and the following 10// disclaimer in the documentation and/or other materials provided 11// with the distribution. 12// * Neither the name of Google Inc. nor the names of its 13// contributors may be used to endorse or promote products derived 14// from this software without specific prior written permission. 15// 16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#ifndef V8_PROFILE_GENERATOR_H_
#define V8_PROFILE_GENERATOR_H_

#ifdef ENABLE_LOGGING_AND_PROFILING

#include "hashmap.h"
#include "../include/v8-profiler.h"

namespace v8 {
namespace internal {

// Maps security token objects to small integer ids.
// TokenRemovedCallback receives a v8::Persistent handle — presumably a
// weak-handle disposal callback so ids outlive their token objects
// (confirm in the implementation file).
class TokenEnumerator {
 public:
  TokenEnumerator();
  ~TokenEnumerator();
  int GetTokenId(Object* token);

  static const int kNoSecurityToken = -1;
  static const int kInheritsSecurityToken = -2;

 private:
  // Invoked when a tracked token handle is disposed.
  static void TokenRemovedCallback(v8::Persistent<v8::Value> handle,
                                   void* parameter);
  void TokenRemoved(Object** token_location);

  // Parallel lists: token_removed_[i] flags whether token_locations_[i]
  // has been disposed.
  List<Object**> token_locations_;
  List<bool> token_removed_;

  friend class TokenEnumeratorTester;

  DISALLOW_COPY_AND_ASSIGN(TokenEnumerator);
};


// Provides a storage of strings allocated in C++ heap, to hold them
// forever, even if they disappear from JS heap or external storage.
class StringsStorage {
 public:
  StringsStorage();
  ~StringsStorage();

  const char* GetName(String* name);
  const char* GetName(int index);
  inline const char* GetFunctionName(String* name);
  inline const char* GetFunctionName(const char* name);

 private:
  // HashMap match function: keys are C strings compared by content.
  INLINE(static bool StringsMatch(void* key1, void* key2)) {
    return strcmp(reinterpret_cast<char*>(key1),
                  reinterpret_cast<char*>(key2)) == 0;
  }

  // Mapping of strings by String::Hash to const char* strings.
  HashMap names_;
  // Mapping from ints to char* strings.
  List<char*> index_names_;

  DISALLOW_COPY_AND_ASSIGN(StringsStorage);
};


// Describes a unit of generated code for profiling purposes: log tag,
// name (with optional prefix), resource (script) name, line number and
// security token id.
class CodeEntry {
 public:
  explicit INLINE(CodeEntry(int security_token_id));
  // CodeEntry doesn't own name strings, just references them.
  INLINE(CodeEntry(Logger::LogEventsAndTags tag,
                   const char* name_prefix,
                   const char* name,
                   const char* resource_name,
                   int line_number,
                   int security_token_id));

  INLINE(bool is_js_function() const) { return is_js_function_tag(tag_); }
  INLINE(const char* name_prefix() const) { return name_prefix_; }
  INLINE(bool has_name_prefix() const) { return name_prefix_[0] != '\0'; }
  INLINE(const char* name() const) { return name_; }
  INLINE(const char* resource_name() const) { return resource_name_; }
  INLINE(int line_number() const) { return line_number_; }
  INLINE(int security_token_id() const) { return security_token_id_; }

  INLINE(static bool is_js_function_tag(Logger::LogEventsAndTags tag));

  void CopyData(const CodeEntry& source);
  // Hash/equality used by ProfileNode's children map (see CodeEntryHash
  // and CodeEntriesMatch below).
  uint32_t GetCallUid() const;
  bool IsSameAs(CodeEntry* entry) const;

  static const char* kEmptyNamePrefix;

 private:
  Logger::LogEventsAndTags tag_;
  const char* name_prefix_;
  const char* name_;
  const char* resource_name_;
  int line_number_;
  int security_token_id_;

  DISALLOW_COPY_AND_ASSIGN(CodeEntry);
};


class ProfileTree;

// A node of a call tree: a CodeEntry plus self/total tick counters and
// child nodes keyed by CodeEntry identity.
class ProfileNode {
 public:
  INLINE(ProfileNode(ProfileTree* tree, CodeEntry* entry));

  ProfileNode* FindChild(CodeEntry* entry);
  ProfileNode* FindOrAddChild(CodeEntry* entry);
  INLINE(void IncrementSelfTicks()) { ++self_ticks_; }
  INLINE(void IncreaseSelfTicks(unsigned amount)) { self_ticks_ += amount; }
  INLINE(void IncreaseTotalTicks(unsigned amount)) { total_ticks_ += amount; }

  INLINE(CodeEntry* entry() const) { return entry_; }
  INLINE(unsigned self_ticks() const) { return self_ticks_; }
  INLINE(unsigned total_ticks() const) { return total_ticks_; }
  INLINE(const List<ProfileNode*>* children() const) { return &children_list_; }
  // Tick counts converted to milliseconds via the owning tree's scale.
  double GetSelfMillis() const;
  double GetTotalMillis() const;

  void Print(int indent);

 private:
  INLINE(static bool CodeEntriesMatch(void* entry1, void* entry2)) {
    return reinterpret_cast<CodeEntry*>(entry1)->IsSameAs(
        reinterpret_cast<CodeEntry*>(entry2));
  }

  INLINE(static uint32_t CodeEntryHash(CodeEntry* entry)) {
    return entry->GetCallUid();
  }

  ProfileTree* tree_;
  CodeEntry* entry_;
  unsigned total_ticks_;
  unsigned self_ticks_;
  // Mapping from CodeEntry* to ProfileNode*
  HashMap children_;
  List<ProfileNode*> children_list_;

  DISALLOW_COPY_AND_ASSIGN(ProfileNode);
};


// A call tree built from sampled stack traces; paths can be added
// root-first (AddPathFromStart) or leaf-first (AddPathFromEnd).
class ProfileTree {
 public:
  ProfileTree();
  ~ProfileTree();

  void AddPathFromEnd(const Vector<CodeEntry*>& path);
  void AddPathFromStart(const Vector<CodeEntry*>& path);
  void CalculateTotalTicks();
  // Copies nodes from |src|, filtered by the given security token id.
  void FilteredClone(ProfileTree* src, int security_token_id);

  double TicksToMillis(unsigned ticks) const {
    return ticks * ms_to_ticks_scale_;
  }
  ProfileNode* root() const { return root_; }
  void SetTickRatePerMs(double ticks_per_ms);

  void ShortPrint();
  void Print() {
    root_->Print(0);
  }

 private:
  template <typename Callback>
  void TraverseDepthFirst(Callback* callback);

  CodeEntry root_entry_;
  ProfileNode* root_;
  double ms_to_ticks_scale_;

  DISALLOW_COPY_AND_ASSIGN(ProfileTree);
};


// A single CPU profile, identified by title and uid, holding a pair of
// call trees: top-down and bottom-up.
class CpuProfile {
 public:
  CpuProfile(const char* title, unsigned uid)
      : title_(title), uid_(uid) { }

  // Add pc -> ... -> main() call path to the profile.
  void AddPath(const Vector<CodeEntry*>& path);
  void CalculateTotalTicks();
  void SetActualSamplingRate(double actual_sampling_rate);
  CpuProfile* FilteredClone(int security_token_id);

  INLINE(const char* title() const) { return title_; }
  INLINE(unsigned uid() const) { return uid_; }
  INLINE(const ProfileTree* top_down() const) { return &top_down_; }
  INLINE(const ProfileTree* bottom_up() const) { return &bottom_up_; }

  void UpdateTicksScale();

  void ShortPrint();
  void Print();

 private:
  const char* title_;
  unsigned uid_;
  ProfileTree top_down_;
  ProfileTree bottom_up_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfile);
};


// Maps addresses of generated code to CodeEntry objects, backed by a
// splay tree keyed by code start address.
class CodeMap {
 public:
  CodeMap() { }
  INLINE(void AddCode(Address addr, CodeEntry* entry, unsigned size));
  INLINE(void MoveCode(Address from, Address to));
  INLINE(void DeleteCode(Address addr));
  void AddAlias(Address start, CodeEntry* entry, Address code_start);
  CodeEntry* FindEntry(Address addr);

  void Print();

 private:
  // Tree value: the entry plus the size of its code region.
  struct CodeEntryInfo {
    CodeEntryInfo(CodeEntry* an_entry, unsigned a_size)
        : entry(an_entry), size(a_size) { }
    CodeEntry* entry;
    unsigned size;
  };

  struct CodeTreeConfig {
    typedef Address Key;
    typedef CodeEntryInfo Value;
    static const Key kNoKey;
    static const Value kNoValue;
    static int Compare(const Key& a, const Key& b) {
      return a < b ? -1 : (a > b ? 1 : 0);
    }
  };
  typedef SplayTree<CodeTreeConfig> CodeTree;

  class CodeTreePrinter {
   public:
    void Call(const Address& key, const CodeEntryInfo& value);
  };

  CodeTree tree_;

  DISALLOW_COPY_AND_ASSIGN(CodeMap);
};


// Owns all collected CPU profiles and their CodeEntry objects; hands
// out profile lists filtered per security token.
class CpuProfilesCollection {
 public:
  CpuProfilesCollection();
  ~CpuProfilesCollection();

  bool StartProfiling(const char* title, unsigned uid);
  bool StartProfiling(String* title, unsigned uid);
  CpuProfile* StopProfiling(int security_token_id,
                            const char* title,
                            double actual_sampling_rate);
  List<CpuProfile*>* Profiles(int security_token_id);
  const char* GetName(String* name) {
    return function_and_resource_names_.GetName(name);
  }
  const char* GetName(int args_count) {
    return function_and_resource_names_.GetName(args_count);
  }
  CpuProfile* GetProfile(int security_token_id, unsigned uid);
  bool IsLastProfile(const char* title);

  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                          String* name, String* resource_name, int line_number);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, const char* name);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                          const char* name_prefix, String* name);
  CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag, int args_count);
  CodeEntry* NewCodeEntry(int security_token_id);

  // Called from profile generator thread.
  void AddPathToCurrentProfiles(const Vector<CodeEntry*>& path);

  // Limits the number of profiles that can be simultaneously collected.
  static const int kMaxSimultaneousProfiles = 100;

 private:
  const char* GetFunctionName(String* name) {
    return function_and_resource_names_.GetFunctionName(name);
  }
  const char* GetFunctionName(const char* name) {
    return function_and_resource_names_.GetFunctionName(name);
  }
  List<CpuProfile*>* GetProfilesList(int security_token_id);
  int TokenToIndex(int security_token_id);

  // HashMap match function for profiles_uids_: keys compared by pointer
  // identity.
  INLINE(static bool UidsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  StringsStorage function_and_resource_names_;
  List<CodeEntry*> code_entries_;
  List<List<CpuProfile*>* > profiles_by_token_;
  // Mapping from profiles' uids to indexes in the second nested list
  // of profiles_by_token_.
  HashMap profiles_uids_;

  // Accessed by VM thread and profile generator thread.
  List<CpuProfile*> current_profiles_;
  Semaphore* current_profiles_semaphore_;

  DISALLOW_COPY_AND_ASSIGN(CpuProfilesCollection);
};


// Estimates the actual sampling rate (ticks per millisecond) by
// periodically comparing the tick count against wall-clock time.
class SampleRateCalculator {
 public:
  SampleRateCalculator()
      : result_(Logger::kSamplingIntervalMs * kResultScale),
        ticks_per_ms_(Logger::kSamplingIntervalMs),
        measurements_count_(0),
        wall_time_query_countdown_(1) {
  }

  double ticks_per_ms() {
    return result_ / static_cast<double>(kResultScale);
  }
  void Tick();
  void UpdateMeasurements(double current_time);

  // Instead of querying current wall time each tick,
  // we use this constant to control query intervals.
  static const unsigned kWallTimeQueryIntervalMs = 100;

 private:
  // As the result needs to be accessed from a different thread, we
  // use type that guarantees atomic writes to memory. There should
  // be <= 1000 ticks per second, thus storing a value of a 10 ** 5
  // order should provide enough precision while keeping away from a
  // potential overflow.
  static const int kResultScale = 100000;

  AtomicWord result_;
  // All other fields are accessed only from the sampler thread.
  double ticks_per_ms_;
  unsigned measurements_count_;
  unsigned wall_time_query_countdown_;
  double last_wall_time_;

  DISALLOW_COPY_AND_ASSIGN(SampleRateCalculator);
};


// Turns raw tick samples into CPU profiles: RecordTickSample resolves
// sampled addresses via the CodeMap and feeds call paths into the
// profiles collection.
class ProfileGenerator {
 public:
  explicit ProfileGenerator(CpuProfilesCollection* profiles);

  // The NewCodeEntry overloads below simply forward to the collection,
  // which owns the created entries.
  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 String* name,
                                 String* resource_name,
                                 int line_number)) {
    return profiles_->NewCodeEntry(tag, name, resource_name, line_number);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 const char* name)) {
    return profiles_->NewCodeEntry(tag, name);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 const char* name_prefix,
                                 String* name)) {
    return profiles_->NewCodeEntry(tag, name_prefix, name);
  }

  INLINE(CodeEntry* NewCodeEntry(Logger::LogEventsAndTags tag,
                                 int args_count)) {
    return profiles_->NewCodeEntry(tag, args_count);
  }

  INLINE(CodeEntry* NewCodeEntry(int security_token_id)) {
    return profiles_->NewCodeEntry(security_token_id);
  }

  void RecordTickSample(const TickSample& sample);

  INLINE(CodeMap* code_map()) { return &code_map_; }

  INLINE(void Tick()) { sample_rate_calc_.Tick(); }
  INLINE(double actual_sampling_rate()) {
    return sample_rate_calc_.ticks_per_ms();
  }

  static const char* kAnonymousFunctionName;
  static const char* kProgramEntryName;
  static const char* kGarbageCollectorEntryName;

 private:
  INLINE(CodeEntry* EntryForVMState(StateTag tag));

  CpuProfilesCollection* profiles_;
  CodeMap code_map_;
  CodeEntry* program_entry_;
  CodeEntry* gc_entry_;
  SampleRateCalculator sample_rate_calc_;

  DISALLOW_COPY_AND_ASSIGN(ProfileGenerator);
};


class HeapEntry;

// A directed edge of the heap graph. The payload is either a name (for
// named reference kinds) or an index (for element/hidden kinds) — see
// the union below and the asserts in index()/name().
class HeapGraphEdge BASE_EMBEDDED {
 public:
  enum Type {
    kContextVariable = v8::HeapGraphEdge::kContextVariable,
    kElement = v8::HeapGraphEdge::kElement,
    kProperty = v8::HeapGraphEdge::kProperty,
    kInternal = v8::HeapGraphEdge::kInternal,
    kHidden = v8::HeapGraphEdge::kHidden,
    kShortcut = v8::HeapGraphEdge::kShortcut
  };

  HeapGraphEdge() { }
  void Init(int child_index, Type type, const char* name, HeapEntry* to);
  void Init(int child_index, Type type, int index, HeapEntry* to);
  void Init(int child_index, int index, HeapEntry* to);

  Type type() { return static_cast<Type>(type_); }
  int index() {
    ASSERT(type_ == kElement || type_ == kHidden);
    return index_;
  }
  const char* name() {
    ASSERT(type_ == kContextVariable
           || type_ == kProperty
           || type_ == kInternal
           || type_ == kShortcut);
    return name_;
  }
  HeapEntry* to() { return to_; }

  HeapEntry* From();

 private:
  // Packed with type_ into a single word; child_index_ is limited
  // to 29 bits.
  int child_index_ : 29;
  unsigned type_ : 3;
  union {
    int index_;
    const char* name_;
  };
  HeapEntry* to_;

  DISALLOW_COPY_AND_ASSIGN(HeapGraphEdge);
};


class CachedHeapGraphPath;
class HeapGraphPath;
class HeapSnapshot;

// HeapEntry instances represent an entity from the heap (or a special
// virtual node, e.g. root). To make heap snapshots more compact,
// HeapEntries has a special memory layout (no Vectors or Lists used):
//
//   +-----------------+
//        HeapEntry
//   +-----------------+
//      HeapGraphEdge    |
//           ...         } children_count
//      HeapGraphEdge    |
//   +-----------------+
//      HeapGraphEdge*   |
//           ...         } retainers_count
//      HeapGraphEdge*   |
//   +-----------------+
//
// In a HeapSnapshot, all entries are hand-allocated in a continuous array
// of raw bytes.
//
class HeapEntry BASE_EMBEDDED {
 public:
  enum Type {
    kHidden = v8::HeapGraphNode::kHidden,
    kArray = v8::HeapGraphNode::kArray,
    kString = v8::HeapGraphNode::kString,
    kObject = v8::HeapGraphNode::kObject,
    kCode = v8::HeapGraphNode::kCode,
    kClosure = v8::HeapGraphNode::kClosure,
    kRegExp = v8::HeapGraphNode::kRegExp,
    kHeapNumber = v8::HeapGraphNode::kHeapNumber
  };

  HeapEntry() { }
  void Init(HeapSnapshot* snapshot,
            Type type,
            const char* name,
            uint64_t id,
            int self_size,
            int children_count,
            int retainers_count);

  HeapSnapshot* snapshot() { return snapshot_; }
  Type type() { return static_cast<Type>(type_); }
  const char* name() { return name_; }
  inline uint64_t id();
  int self_size() { return self_size_; }
  int retained_size() { return retained_size_; }
  void add_retained_size(int size) { retained_size_ += size; }
  void set_retained_size(int value) { retained_size_ = value; }
  int ordered_index() { return ordered_index_; }
  void set_ordered_index(int value) { ordered_index_ = value; }

  // Children and retainers live immediately after this object in
  // memory (see the layout diagram above).
  Vector<HeapGraphEdge> children() {
    return Vector<HeapGraphEdge>(children_arr(), children_count_); }
  Vector<HeapGraphEdge*> retainers() {
    return Vector<HeapGraphEdge*>(retainers_arr(), retainers_count_); }
  List<HeapGraphPath*>* GetRetainingPaths();
  HeapEntry* dominator() { return dominator_; }
  void set_dominator(HeapEntry* entry) { dominator_ = entry; }

  // Three-state painting used by reachability traversals; see the
  // kUnpainted/kPainted/kPaintedReachableFromOthers constants below.
  void clear_paint() { painted_ = kUnpainted; }
  bool painted_reachable() { return painted_ == kPainted; }
  void paint_reachable() {
    ASSERT(painted_ == kUnpainted);
    painted_ = kPainted;
  }
  bool not_painted_reachable_from_others() {
    return painted_ != kPaintedReachableFromOthers;
  }
  void paint_reachable_from_others() {
    painted_ = kPaintedReachableFromOthers;
  }
  template<class Visitor>
  void ApplyAndPaintAllReachable(Visitor* visitor);
  void PaintAllReachable();

  void SetIndexedReference(HeapGraphEdge::Type type,
                           int child_index,
                           int index,
                           HeapEntry* entry,
                           int retainer_index);
  void SetNamedReference(HeapGraphEdge::Type type,
                         int child_index,
                         const char* name,
                         HeapEntry* entry,
                         int retainer_index);
  void SetUnidirElementReference(int child_index, int index, HeapEntry* entry);

  int EntrySize() { return EntriesSize(1, children_count_, retainers_count_); }
  int RetainedSize(bool exact);
  List<HeapGraphPath*>* CalculateRetainingPaths();

  void Print(int max_depth, int indent);

  static int EntriesSize(int entries_count,
                         int children_count,
                         int retainers_count);
  static uint32_t Hash(HeapEntry* entry) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(entry)));
  }
  static bool Match(void* entry1, void* entry2) { return entry1 == entry2; }

 private:
  HeapGraphEdge* children_arr() {
    return reinterpret_cast<HeapGraphEdge*>(this + 1);
  }
  HeapGraphEdge** retainers_arr() {
    return reinterpret_cast<HeapGraphEdge**>(children_arr() + children_count_);
  }
  void CalculateExactRetainedSize();
  void FindRetainingPaths(CachedHeapGraphPath* prev_path,
                          List<HeapGraphPath*>* retaining_paths);
  const char* TypeAsString();

  unsigned painted_: 2;
  unsigned type_: 3;
  int children_count_: 27;
  int retainers_count_;
  int self_size_;
  union {
    int ordered_index_;  // Used during dominator tree building.
    int retained_size_;  // At that moment, there is no retained size yet.
  };
  HeapEntry* dominator_;
  HeapSnapshot* snapshot_;
  struct Id {
    uint32_t id1_;
    uint32_t id2_;
  } id_;  // This is to avoid extra padding of 64-bit value.
  const char* name_;

  // Paints used for exact retained sizes calculation.
  static const unsigned kUnpainted = 0;
  static const unsigned kPainted = 1;
  static const unsigned kPaintedReachableFromOthers = 2;

  static const int kExactRetainedSizeTag = 1;

  DISALLOW_COPY_AND_ASSIGN(HeapEntry);
};


// An ordered list of edges describing one retaining path through the
// heap graph.
class HeapGraphPath {
 public:
  HeapGraphPath()
      : path_(8) { }
  explicit HeapGraphPath(const List<HeapGraphEdge*>& path);

  void Add(HeapGraphEdge* edge) { path_.Add(edge); }
  void Set(int index, HeapGraphEdge* edge) { path_[index] = edge; }
  const List<HeapGraphEdge*>* path() { return &path_; }

  void Print();

 private:
  List<HeapGraphEdge*> path_;

  DISALLOW_COPY_AND_ASSIGN(HeapGraphPath);
};


class HeapSnapshotsCollection;
class HeapSnapshotsDiff;

// HeapSnapshot represents a single heap snapshot. It is stored in
// HeapSnapshotsCollection, which is also a factory for
// HeapSnapshots. All HeapSnapshots share strings copied from JS heap
// to be able to return them even if they were collected.
// HeapSnapshotGenerator fills in a HeapSnapshot.
class HeapSnapshot {
 public:
  enum Type {
    kFull = v8::HeapSnapshot::kFull,
    kAggregated = v8::HeapSnapshot::kAggregated
  };

  HeapSnapshot(HeapSnapshotsCollection* collection,
               Type type,
               const char* title,
               unsigned uid);
  ~HeapSnapshot();

  HeapSnapshotsCollection* collection() { return collection_; }
  Type type() { return type_; }
  const char* title() { return title_; }
  unsigned uid() { return uid_; }
  HeapEntry* root() { return root_entry_; }
  HeapEntry* gc_roots() { return gc_roots_entry_; }
  List<HeapEntry*>* entries() { return &entries_; }

  // Entries are carved out of a single raw byte buffer (raw_entries_);
  // AllocateEntries must size it before AddEntry is used.
  void AllocateEntries(
      int entries_count, int children_count, int retainers_count);
  HeapEntry* AddEntry(
      HeapObject* object, int children_count, int retainers_count);
  HeapEntry* AddEntry(HeapEntry::Type type,
                      const char* name,
                      uint64_t id,
                      int size,
                      int children_count,
                      int retainers_count);
  void ClearPaint();
  HeapSnapshotsDiff* CompareWith(HeapSnapshot* snapshot);
  HeapEntry* GetEntryById(uint64_t id);
  List<HeapGraphPath*>* GetRetainingPaths(HeapEntry* entry);
  List<HeapEntry*>* GetSortedEntriesList();
  template<class Visitor>
  void IterateEntries(Visitor* visitor) { entries_.Iterate(visitor); }
  void SetDominatorsToSelf();

  void Print(int max_depth);
  void PrintEntriesSize();

  static HeapObject* const kInternalRootObject;
  static HeapObject* const kGcRootsObject;

 private:
  HeapEntry* AddEntry(HeapObject* object,
                      HeapEntry::Type type,
                      const char* name,
                      int children_count,
                      int retainers_count);
  HeapEntry* GetNextEntryToInit();

  HeapSnapshotsCollection* collection_;
  Type type_;
  const char* title_;
  unsigned uid_;
  HeapEntry* root_entry_;
  HeapEntry* gc_roots_entry_;
  // Backing store for all HeapEntry objects (see layout comment on
  // HeapEntry).
  char* raw_entries_;
  List<HeapEntry*> entries_;
  bool entries_sorted_;
  HashMap retaining_paths_;
#ifdef DEBUG
  int raw_entries_size_;
#endif

  friend class HeapSnapshotTester;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshot);
};


// Assigns uids to heap objects and keeps them stable across snapshots:
// ObjectMoveEvent forwarding (via MoveObject) tracks GC moves, and
// RemoveDeadEntries drops entries not accessed since the last snapshot.
class HeapObjectsMap {
 public:
  HeapObjectsMap();
  ~HeapObjectsMap();

  void SnapshotGenerationFinished();
  uint64_t FindObject(Address addr);
  void MoveObject(Address from, Address to);

  static const uint64_t kInternalRootObjectId;
  static const uint64_t kGcRootsObjectId;
  static const uint64_t kFirstAvailableObjectId;

 private:
  struct EntryInfo {
    explicit EntryInfo(uint64_t id) : id(id), accessed(true) { }
    EntryInfo(uint64_t id, bool accessed) : id(id), accessed(accessed) { }
    uint64_t id;
    bool accessed;
  };

  void AddEntry(Address addr, uint64_t id);
  uint64_t FindEntry(Address addr);
  void RemoveDeadEntries();

  static bool AddressesMatch(void* key1, void* key2) {
    return key1 == key2;
  }

  static uint32_t AddressHash(Address addr) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(addr)));
  }

  bool initial_fill_mode_;
  uint64_t next_id_;
  HashMap entries_map_;
  List<EntryInfo>* entries_;

  DISALLOW_COPY_AND_ASSIGN(HeapObjectsMap);
};


// The result of comparing two snapshots: two synthetic root entries
// (hand-allocated like snapshot entries) listing added and deleted
// objects.
class HeapSnapshotsDiff {
 public:
  HeapSnapshotsDiff(HeapSnapshot* snapshot1, HeapSnapshot* snapshot2)
      : snapshot1_(snapshot1),
        snapshot2_(snapshot2),
        raw_additions_root_(NULL),
        raw_deletions_root_(NULL) { }

  ~HeapSnapshotsDiff() {
    DeleteArray(raw_deletions_root_);
    DeleteArray(raw_additions_root_);
  }

  void AddAddedEntry(int child_index, int index, HeapEntry* entry) {
    additions_root()->SetUnidirElementReference(child_index, index, entry);
  }

  void AddDeletedEntry(int child_index, int index, HeapEntry* entry) {
    deletions_root()->SetUnidirElementReference(child_index, index, entry);
  }

  // Must be called before AddAddedEntry/AddDeletedEntry — it allocates
  // the raw root buffers.
  void CreateRoots(int additions_count, int deletions_count);

  HeapEntry* additions_root() {
    return reinterpret_cast<HeapEntry*>(raw_additions_root_);
  }
  HeapEntry* deletions_root() {
    return reinterpret_cast<HeapEntry*>(raw_deletions_root_);
  }

 private:
  HeapSnapshot* snapshot1_;
  HeapSnapshot* snapshot2_;
  char* raw_additions_root_;
  char* raw_deletions_root_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsDiff);
};


// Produces and owns HeapSnapshotsDiff objects.
class HeapSnapshotsComparator {
 public:
  HeapSnapshotsComparator() { }
  ~HeapSnapshotsComparator();
  HeapSnapshotsDiff* Compare(HeapSnapshot* snapshot1, HeapSnapshot* snapshot2);
 private:
  List<HeapSnapshotsDiff*> diffs_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsComparator);
};


// Owns all taken heap snapshots plus the shared infrastructure they
// use: copied strings, the object-id map and the snapshot comparator.
class HeapSnapshotsCollection {
 public:
  HeapSnapshotsCollection();
  ~HeapSnapshotsCollection();

  bool is_tracking_objects() { return is_tracking_objects_; }

  HeapSnapshot* NewSnapshot(
      HeapSnapshot::Type type, const char* name, unsigned uid);
  void SnapshotGenerationFinished(HeapSnapshot* snapshot);
  List<HeapSnapshot*>* snapshots() { return &snapshots_; }
  HeapSnapshot* GetSnapshot(unsigned uid);

  const char* GetName(String* name) { return names_.GetName(name); }
  const char* GetName(int index) { return names_.GetName(index); }
  const char* GetFunctionName(String* name) {
    return names_.GetFunctionName(name);
  }

  TokenEnumerator* token_enumerator() { return token_enumerator_; }

  uint64_t GetObjectId(Address addr) { return ids_.FindObject(addr); }
  void ObjectMoveEvent(Address from, Address to) { ids_.MoveObject(from, to); }

  HeapSnapshotsDiff* CompareSnapshots(HeapSnapshot* snapshot1,
                                      HeapSnapshot* snapshot2);

 private:
  INLINE(static bool HeapSnapshotsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  bool is_tracking_objects_;  // Whether tracking object moves is needed.
  List<HeapSnapshot*> snapshots_;
  // Mapping from snapshots' uids to HeapSnapshot* pointers.
  HashMap snapshots_uids_;
  StringsStorage names_;
  TokenEnumerator* token_enumerator_;
  // Mapping from HeapObject addresses to objects' uids.
  HeapObjectsMap ids_;
  HeapSnapshotsComparator comparator_;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotsCollection);
};


// The HeapEntriesMap instance is used to track a mapping between
// real heap objects and their representations in heap snapshots.
class HeapEntriesMap {
 public:
  HeapEntriesMap();
  ~HeapEntriesMap();

  HeapEntry* Map(HeapObject* object);
  void Pair(HeapObject* object, HeapEntry* entry);
  void CountReference(HeapObject* from, HeapObject* to,
                      int* prev_children_count = NULL,
                      int* prev_retainers_count = NULL);
  template<class Visitor>
  void UpdateEntries(Visitor* visitor);

  int entries_count() { return entries_count_; }
  int total_children_count() { return total_children_count_; }
  int total_retainers_count() { return total_retainers_count_; }

  // Sentinel used before a real HeapEntry is allocated for an object.
  static HeapEntry *const kHeapEntryPlaceholder;

 private:
  struct EntryInfo {
    explicit EntryInfo(HeapEntry* entry)
        : entry(entry), children_count(0), retainers_count(0) { }
    HeapEntry* entry;
    int children_count;
    int retainers_count;
  };

  static uint32_t Hash(HeapObject* object) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(object)));
  }
  static bool HeapObjectsMatch(void* key1, void* key2) { return key1 == key2; }

  HashMap entries_;
  int entries_count_;
  int total_children_count_;
  int total_retainers_count_;

  friend class HeapObjectsSet;

  DISALLOW_COPY_AND_ASSIGN(HeapEntriesMap);
};


// A set of heap objects, backed by a pointer-keyed HashMap.
class HeapObjectsSet {
 public:
  HeapObjectsSet();
  void Clear();
  bool Contains(Object* object);
  void Insert(Object* obj);

 private:
  HashMap entries_;

  DISALLOW_COPY_AND_ASSIGN(HeapObjectsSet);
};


// Traverses the heap and fills in a HeapSnapshot. Method names suggest
// a two-phase scheme — CountEntriesAndReferences to size the snapshot,
// then IterateAndExtractReferences to fill it — confirm in the
// implementation file. Progress is reported via v8::ActivityControl,
// so generation can be cancelled.
class HeapSnapshotGenerator {
 public:
  // Abstracts how discovered entries/references are recorded, allowing
  // different fillers for the counting and extraction phases.
  class SnapshotFillerInterface {
   public:
    virtual ~SnapshotFillerInterface() { }
    virtual HeapEntry* AddEntry(HeapObject* obj) = 0;
    virtual void SetIndexedReference(HeapGraphEdge::Type type,
                                     HeapObject* parent_obj,
                                     HeapEntry* parent_entry,
                                     int index,
                                     Object* child_obj,
                                     HeapEntry* child_entry) = 0;
    virtual void SetNamedReference(HeapGraphEdge::Type type,
                                   HeapObject* parent_obj,
                                   HeapEntry* parent_entry,
                                   const char* reference_name,
                                   Object* child_obj,
                                   HeapEntry* child_entry) = 0;
    virtual void SetRootGcRootsReference() = 0;
    virtual void SetRootShortcutReference(Object* child_obj,
                                          HeapEntry* child_entry) = 0;
    virtual void SetStrongRootReference(Object* child_obj,
                                        HeapEntry* child_entry) = 0;
  };

  HeapSnapshotGenerator(HeapSnapshot* snapshot,
                        v8::ActivityControl* control);
  // Returns false if generation was aborted (e.g. via the activity
  // control) — presumably; verify against the implementation.
  bool GenerateSnapshot();

 private:
  bool ApproximateRetainedSizes();
  bool BuildDominatorTree(const Vector<HeapEntry*>& entries,
                          Vector<HeapEntry*>* dominators);
  bool CountEntriesAndReferences();
  HeapEntry* GetEntry(Object* obj);
  void IncProgressCounter() { ++progress_counter_; }
  void ExtractReferences(HeapObject* obj);
  void ExtractClosureReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractPropertyReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractElementReferences(JSObject* js_obj, HeapEntry* entry);
  void ExtractInternalReferences(JSObject* js_obj, HeapEntry* entry);
  bool FillReferences();
  void FillReversePostorderIndexes(Vector<HeapEntry*>* entries);
  bool IterateAndExtractReferences();
  inline bool ReportProgress(bool force = false);
  bool SetEntriesDominators();
  void SetClosureReference(HeapObject* parent_obj,
                           HeapEntry* parent,
                           String* reference_name,
                           Object* child);
  void SetElementReference(HeapObject* parent_obj,
                           HeapEntry* parent,
                           int index,
                           Object* child);
  void SetInternalReference(HeapObject* parent_obj,
                            HeapEntry* parent,
                            const char* reference_name,
                            Object* child);
  void SetInternalReference(HeapObject* parent_obj,
                            HeapEntry* parent,
                            int index,
                            Object* child);
  void SetHiddenReference(HeapObject* parent_obj,
                          HeapEntry* parent,
                          int index,
                          Object* child);
  void SetPropertyReference(HeapObject* parent_obj,
                            HeapEntry* parent,
                            String* reference_name,
                            Object* child);
  void SetPropertyShortcutReference(HeapObject* parent_obj,
                                    HeapEntry* parent,
                                    String* reference_name,
                                    Object* child);
  void SetRootShortcutReference(Object* child);
  void SetRootGcRootsReference();
  void SetGcRootsReference(Object* child);
  void SetProgressTotal(int iterations_count);

  HeapSnapshot* snapshot_;
  v8::ActivityControl* control_;
  HeapSnapshotsCollection* collection_;
  // Mapping from HeapObject* pointers to HeapEntry* pointers.
  HeapEntriesMap entries_;
  SnapshotFillerInterface* filler_;
  // Used during references extraction to mark heap objects that
  // are references via non-hidden properties.
  HeapObjectsSet known_references_;
  // Used during snapshot generation.
  int progress_counter_;
  int progress_total_;

  friend class IndexedReferencesExtractor;
  friend class RootsReferencesExtractor;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotGenerator);
};

class OutputStreamWriter;

// Serializes a heap snapshot into JSON and writes it to a
// v8::OutputStream, interning node and string ids along the way.
class HeapSnapshotJSONSerializer {
 public:
  explicit HeapSnapshotJSONSerializer(HeapSnapshot* snapshot)
      : snapshot_(snapshot),
        nodes_(ObjectsMatch),
        strings_(ObjectsMatch),
        next_node_id_(1),
        next_string_id_(1),
        writer_(NULL) {
  }
  void Serialize(v8::OutputStream* stream);

 private:
  INLINE(static bool ObjectsMatch(void* key1, void* key2)) {
    return key1 == key2;
  }

  INLINE(static uint32_t ObjectHash(const void* key)) {
    return ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(key)));
  }

  void EnumerateNodes();
  int GetNodeId(HeapEntry* entry);
  int GetStringId(const char* s);
  void SerializeEdge(HeapGraphEdge* edge);
  void SerializeImpl();
  void SerializeNode(HeapEntry* entry);
  void SerializeNodes();
  void SerializeSnapshot();
  void SerializeString(const unsigned char* s);
  void SerializeStrings();
  void SortHashMap(HashMap* map, List<HashMap::Entry*>* sorted_entries);

  HeapSnapshot* snapshot_;
  // Interning maps: HeapEntry* -> node id, const char* -> string id.
  HashMap nodes_;
  HashMap strings_;
  int next_node_id_;
  int next_string_id_;
  OutputStreamWriter* writer_;

  friend class HeapSnapshotJSONSerializerEnumerator;
  friend class HeapSnapshotJSONSerializerIterator;

  DISALLOW_COPY_AND_ASSIGN(HeapSnapshotJSONSerializer);
};


String* GetConstructorNameForHeapProfile(JSObject* object);

} }  // namespace v8::internal

#endif  // ENABLE_LOGGING_AND_PROFILING

#endif  // V8_PROFILE_GENERATOR_H_