heap-profiler.cc revision d91b9f7d46489a9ee00f9cb415630299c76a502b
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "heap-profiler.h"
#include "frames-inl.h"
#include "global-handles.h"
#include "string-stream.h"

namespace v8 {
namespace internal {


#ifdef ENABLE_LOGGING_AND_PROFILING
namespace {

// Clusterizer is a set of helper functions for converting
// object references into clusters.
class Clusterizer : public AllStatic {
 public:
  static JSObjectsCluster Clusterize(HeapObject* obj) {
    return Clusterize(obj, true);
  }
  static void InsertIntoTree(JSObjectsClusterTree* tree,
                             HeapObject* obj, bool fine_grain);
  static void InsertReferenceIntoTree(JSObjectsClusterTree* tree,
                                      const JSObjectsCluster& cluster) {
    InsertIntoTree(tree, cluster, 0);
  }

 private:
  static JSObjectsCluster Clusterize(HeapObject* obj, bool fine_grain);
  static int CalculateNetworkSize(JSObject* obj);
  static int GetObjectSize(HeapObject* obj) {
    return obj->IsJSObject() ?
        CalculateNetworkSize(JSObject::cast(obj)) : obj->Size();
  }
  static void InsertIntoTree(JSObjectsClusterTree* tree,
                             const JSObjectsCluster& cluster, int size);
};


JSObjectsCluster Clusterizer::Clusterize(HeapObject* obj, bool fine_grain) {
  if (obj->IsJSObject()) {
    JSObject* js_obj = JSObject::cast(obj);
    String* constructor = JSObject::cast(js_obj)->constructor_name();
    // Differentiate Object and Array instances.
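    // In fine grain mode, plain Object and Array instances get per-instance
    // clusters (the cluster also records the object pointer); otherwise
    // objects are clustered by constructor name only.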
    if (fine_grain && (constructor == Heap::Object_symbol() ||
                       constructor == Heap::Array_symbol())) {
      return JSObjectsCluster(constructor, obj);
    } else {
      return JSObjectsCluster(constructor);
    }
  } else if (obj->IsString()) {
    return JSObjectsCluster(Heap::String_symbol());
  } else if (obj->IsJSGlobalPropertyCell()) {
    return JSObjectsCluster(JSObjectsCluster::GLOBAL_PROPERTY);
  } else if (obj->IsCode() || obj->IsSharedFunctionInfo() || obj->IsScript()) {
    return JSObjectsCluster(JSObjectsCluster::CODE);
  }
  return JSObjectsCluster();
}


void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
                                 HeapObject* obj, bool fine_grain) {
  JSObjectsCluster cluster = Clusterize(obj, fine_grain);
  if (cluster.is_null()) return;
  InsertIntoTree(tree, cluster, GetObjectSize(obj));
}


void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
                                 const JSObjectsCluster& cluster, int size) {
  JSObjectsClusterTree::Locator loc;
  tree->Insert(cluster, &loc);
  NumberAndSizeInfo number_and_size = loc.value();
  number_and_size.increment_number(1);
  number_and_size.increment_bytes(size);
  loc.set_value(number_and_size);
}


int Clusterizer::CalculateNetworkSize(JSObject* obj) {
  int size = obj->Size();
  // If 'properties' and 'elements' are non-empty (thus, non-shared),
  // take their size into account.
  if (FixedArray::cast(obj->properties())->length() != 0) {
    size += obj->properties()->Size();
  }
  if (FixedArray::cast(obj->elements())->length() != 0) {
    size += obj->elements()->Size();
  }
  // For functions, also account for non-empty context and literals sizes.
  if (obj->IsJSFunction()) {
    JSFunction* f = JSFunction::cast(obj);
    if (f->unchecked_context()->IsContext()) {
      size += f->context()->Size();
    }
    if (f->literals()->length() != 0) {
      size += f->literals()->Size();
    }
  }
  return size;
}


// A helper class for recording back references.
class ReferencesExtractor : public ObjectVisitor {
 public:
  ReferencesExtractor(const JSObjectsCluster& cluster,
                      RetainerHeapProfile* profile)
      : cluster_(cluster),
        profile_(profile),
        inside_array_(false) {
  }

  void VisitPointer(Object** o) {
    if ((*o)->IsFixedArray() && !inside_array_) {
      // Traverse one level deep for data members that are fixed arrays.
      // This covers the case of 'elements' and 'properties' of JSObject,
      // and function contexts.
      inside_array_ = true;
      FixedArray::cast(*o)->Iterate(this);
      inside_array_ = false;
    } else if ((*o)->IsHeapObject()) {
      profile_->StoreReference(cluster_, HeapObject::cast(*o));
    }
  }

  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) VisitPointer(p);
  }

 private:
  const JSObjectsCluster& cluster_;
  RetainerHeapProfile* profile_;
  bool inside_array_;
};


// A printer interface implementation for the Retainers profile.
class RetainersPrinter : public RetainerHeapProfile::Printer {
 public:
  void PrintRetainers(const JSObjectsCluster& cluster,
                      const StringStream& retainers) {
    HeapStringAllocator allocator;
    StringStream stream(&allocator);
    cluster.Print(&stream);
    LOG(HeapSampleJSRetainersEvent(
        *(stream.ToCString()), *(retainers.ToCString())));
  }
};


// Visitor for printing a cluster tree.
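// Each entry is appended to the stream as ",<cluster>;<instance count>"
// (see ClusterTreePrinter::Print below).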
class ClusterTreePrinter BASE_EMBEDDED {
 public:
  explicit ClusterTreePrinter(StringStream* stream) : stream_(stream) {}
  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size) {
    Print(stream_, cluster, number_and_size);
  }
  static void Print(StringStream* stream,
                    const JSObjectsCluster& cluster,
                    const NumberAndSizeInfo& number_and_size);

 private:
  StringStream* stream_;
};


void ClusterTreePrinter::Print(StringStream* stream,
                               const JSObjectsCluster& cluster,
                               const NumberAndSizeInfo& number_and_size) {
  stream->Put(',');
  cluster.Print(stream);
  stream->Add(";%d", number_and_size.number());
}


// Visitor for printing a retainer tree.
class SimpleRetainerTreePrinter BASE_EMBEDDED {
 public:
  explicit SimpleRetainerTreePrinter(RetainerHeapProfile::Printer* printer)
      : printer_(printer) {}
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);

 private:
  RetainerHeapProfile::Printer* printer_;
};


void SimpleRetainerTreePrinter::Call(const JSObjectsCluster& cluster,
                                     JSObjectsClusterTree* tree) {
  HeapStringAllocator allocator;
  StringStream stream(&allocator);
  ClusterTreePrinter retainers_printer(&stream);
  tree->ForEach(&retainers_printer);
  printer_->PrintRetainers(cluster, stream);
}


// Visitor for aggregating reference counts of equivalent clusters.
class RetainersAggregator BASE_EMBEDDED {
 public:
  RetainersAggregator(ClustersCoarser* coarser, JSObjectsClusterTree* dest_tree)
      : coarser_(coarser), dest_tree_(dest_tree) {}
  void Call(const JSObjectsCluster& cluster,
            const NumberAndSizeInfo& number_and_size);

 private:
  ClustersCoarser* coarser_;
  JSObjectsClusterTree* dest_tree_;
};


void RetainersAggregator::Call(const JSObjectsCluster& cluster,
                               const NumberAndSizeInfo& number_and_size) {
  JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster);
  if (eq.is_null()) eq = cluster;
  JSObjectsClusterTree::Locator loc;
  dest_tree_->Insert(eq, &loc);
  NumberAndSizeInfo aggregated_number = loc.value();
  aggregated_number.increment_number(number_and_size.number());
  loc.set_value(aggregated_number);
}


// Visitor for printing retainers tree. Aggregates equivalent retainer clusters.
class AggregatingRetainerTreePrinter BASE_EMBEDDED {
 public:
  AggregatingRetainerTreePrinter(ClustersCoarser* coarser,
                                 RetainerHeapProfile::Printer* printer)
      : coarser_(coarser), printer_(printer) {}
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);

 private:
  ClustersCoarser* coarser_;
  RetainerHeapProfile::Printer* printer_;
};


void AggregatingRetainerTreePrinter::Call(const JSObjectsCluster& cluster,
                                          JSObjectsClusterTree* tree) {
  if (!coarser_->GetCoarseEquivalent(cluster).is_null()) return;
  JSObjectsClusterTree dest_tree_;
  RetainersAggregator retainers_aggregator(coarser_, &dest_tree_);
  tree->ForEach(&retainers_aggregator);
  HeapStringAllocator allocator;
  StringStream stream(&allocator);
  ClusterTreePrinter retainers_printer(&stream);
  dest_tree_.ForEach(&retainers_printer);
  printer_->PrintRetainers(cluster, stream);
}


// A helper class for building a retainers tree that aggregates
// all equivalent clusters.
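// Only clusters that have a coarse equivalent are processed; their retainer
// trees are merged into output_tree_ under that equivalent cluster.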
class RetainerTreeAggregator BASE_EMBEDDED {
 public:
  explicit RetainerTreeAggregator(ClustersCoarser* coarser)
      : coarser_(coarser) {}
  void Process(JSObjectsRetainerTree* input_tree) {
    input_tree->ForEach(this);
  }
  void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
  JSObjectsRetainerTree& output_tree() { return output_tree_; }

 private:
  ClustersCoarser* coarser_;
  JSObjectsRetainerTree output_tree_;
};


void RetainerTreeAggregator::Call(const JSObjectsCluster& cluster,
                                  JSObjectsClusterTree* tree) {
  JSObjectsCluster eq = coarser_->GetCoarseEquivalent(cluster);
  if (eq.is_null()) return;
  JSObjectsRetainerTree::Locator loc;
  if (output_tree_.Insert(eq, &loc)) {
    loc.set_value(new JSObjectsClusterTree());
  }
  RetainersAggregator retainers_aggregator(coarser_, loc.value());
  tree->ForEach(&retainers_aggregator);
}

}  // namespace


const JSObjectsClusterTreeConfig::Key JSObjectsClusterTreeConfig::kNoKey;
const JSObjectsClusterTreeConfig::Value JSObjectsClusterTreeConfig::kNoValue;


ConstructorHeapProfile::ConstructorHeapProfile()
    : zscope_(DELETE_ON_EXIT) {
}


void ConstructorHeapProfile::Call(const JSObjectsCluster& cluster,
                                  const NumberAndSizeInfo& number_and_size) {
  HeapStringAllocator allocator;
  StringStream stream(&allocator);
  cluster.Print(&stream);
  LOG(HeapSampleJSConstructorEvent(*(stream.ToCString()),
                                   number_and_size.number(),
                                   number_and_size.bytes()));
}


void ConstructorHeapProfile::CollectStats(HeapObject* obj) {
  Clusterizer::InsertIntoTree(&js_objects_info_tree_, obj, false);
}


void ConstructorHeapProfile::PrintStats() {
  js_objects_info_tree_.ForEach(this);
}


static const char* GetConstructorName(const char* name) {
  return name[0] != '\0' ? name : "(anonymous)";
name : "(anonymous)"; 348} 349 350 351void JSObjectsCluster::Print(StringStream* accumulator) const { 352 ASSERT(!is_null()); 353 if (constructor_ == FromSpecialCase(ROOTS)) { 354 accumulator->Add("(roots)"); 355 } else if (constructor_ == FromSpecialCase(GLOBAL_PROPERTY)) { 356 accumulator->Add("(global property)"); 357 } else if (constructor_ == FromSpecialCase(CODE)) { 358 accumulator->Add("(code)"); 359 } else if (constructor_ == FromSpecialCase(SELF)) { 360 accumulator->Add("(self)"); 361 } else { 362 SmartPointer<char> s_name( 363 constructor_->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)); 364 accumulator->Add("%s", GetConstructorName(*s_name)); 365 if (instance_ != NULL) { 366 accumulator->Add(":%p", static_cast<void*>(instance_)); 367 } 368 } 369} 370 371 372void JSObjectsCluster::DebugPrint(StringStream* accumulator) const { 373 if (!is_null()) { 374 Print(accumulator); 375 } else { 376 accumulator->Add("(null cluster)"); 377 } 378} 379 380 381inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs( 382 const JSObjectsCluster& cluster_) 383 : cluster(cluster_), refs(kInitialBackrefsListCapacity) { 384} 385 386 387inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs( 388 const ClustersCoarser::ClusterBackRefs& src) 389 : cluster(src.cluster), refs(src.refs.capacity()) { 390 refs.AddAll(src.refs); 391} 392 393 394inline ClustersCoarser::ClusterBackRefs& 395 ClustersCoarser::ClusterBackRefs::operator=( 396 const ClustersCoarser::ClusterBackRefs& src) { 397 if (this == &src) return *this; 398 cluster = src.cluster; 399 refs.Clear(); 400 refs.AddAll(src.refs); 401 return *this; 402} 403 404 405inline int ClustersCoarser::ClusterBackRefs::Compare( 406 const ClustersCoarser::ClusterBackRefs& a, 407 const ClustersCoarser::ClusterBackRefs& b) { 408 int cmp = JSObjectsCluster::CompareConstructors(a.cluster, b.cluster); 409 if (cmp != 0) return cmp; 410 if (a.refs.length() < b.refs.length()) return -1; 411 if (a.refs.length() > b.refs.length()) return 1; 412 for (int i = 0; i < a.refs.length(); ++i) { 413 int cmp = JSObjectsCluster::Compare(a.refs[i], b.refs[i]); 414 if (cmp != 0) return cmp; 415 } 416 return 0; 417} 418 419 420ClustersCoarser::ClustersCoarser() 421 : zscope_(DELETE_ON_EXIT), 422 sim_list_(ClustersCoarser::kInitialSimilarityListCapacity), 423 current_pair_(NULL), 424 current_set_(NULL), 425 self_(NULL) { 426} 427 428 429void ClustersCoarser::Call(const JSObjectsCluster& cluster, 430 JSObjectsClusterTree* tree) { 431 if (!cluster.can_be_coarsed()) return; 432 ClusterBackRefs pair(cluster); 433 ASSERT(current_pair_ == NULL); 434 current_pair_ = &pair; 435 current_set_ = new JSObjectsRetainerTree(); 436 self_ = &cluster; 437 tree->ForEach(this); 438 sim_list_.Add(pair); 439 current_pair_ = NULL; 440 current_set_ = NULL; 441 self_ = NULL; 442} 443 444 445void ClustersCoarser::Call(const JSObjectsCluster& cluster, 446 const NumberAndSizeInfo& number_and_size) { 447 ASSERT(current_pair_ != NULL); 448 ASSERT(current_set_ != NULL); 449 ASSERT(self_ != NULL); 450 JSObjectsRetainerTree::Locator loc; 451 if (JSObjectsCluster::Compare(*self_, cluster) == 0) { 452 current_pair_->refs.Add(JSObjectsCluster(JSObjectsCluster::SELF)); 453 return; 454 } 455 JSObjectsCluster eq = GetCoarseEquivalent(cluster); 456 if (!eq.is_null()) { 457 if (current_set_->Find(eq, &loc)) return; 458 current_pair_->refs.Add(eq); 459 current_set_->Insert(eq, &loc); 460 } else { 461 current_pair_->refs.Add(cluster); 462 } 463} 464 465 466void ClustersCoarser::Process(JSObjectsRetainerTree* tree) { 467 
  int last_eq_clusters = -1;
  for (int i = 0; i < kMaxPassesCount; ++i) {
    sim_list_.Clear();
    const int curr_eq_clusters = DoProcess(tree);
    // If no new cluster equivalents were discovered, stop processing.
    if (last_eq_clusters == curr_eq_clusters) break;
    last_eq_clusters = curr_eq_clusters;
  }
}


int ClustersCoarser::DoProcess(JSObjectsRetainerTree* tree) {
  tree->ForEach(this);
  sim_list_.Iterate(ClusterBackRefs::SortRefsIterator);
  sim_list_.Sort(ClusterBackRefsCmp);
  return FillEqualityTree();
}


JSObjectsCluster ClustersCoarser::GetCoarseEquivalent(
    const JSObjectsCluster& cluster) {
  if (!cluster.can_be_coarsed()) return JSObjectsCluster();
  EqualityTree::Locator loc;
  return eq_tree_.Find(cluster, &loc) ? loc.value() : JSObjectsCluster();
}


bool ClustersCoarser::HasAnEquivalent(const JSObjectsCluster& cluster) {
  // Return true for coarsible clusters that have a non-identical equivalent.
  if (!cluster.can_be_coarsed()) return false;
  JSObjectsCluster eq = GetCoarseEquivalent(cluster);
  return !eq.is_null() && JSObjectsCluster::Compare(cluster, eq) != 0;
}


int ClustersCoarser::FillEqualityTree() {
  int eq_clusters_count = 0;
  int eq_to = 0;
  bool first_added = false;
  for (int i = 1; i < sim_list_.length(); ++i) {
    if (ClusterBackRefs::Compare(sim_list_[i], sim_list_[eq_to]) == 0) {
      EqualityTree::Locator loc;
      if (!first_added) {
        // Add self-equivalence, if we have more than one item in this
        // equivalence class.
        eq_tree_.Insert(sim_list_[eq_to].cluster, &loc);
        loc.set_value(sim_list_[eq_to].cluster);
        first_added = true;
      }
      eq_tree_.Insert(sim_list_[i].cluster, &loc);
      loc.set_value(sim_list_[eq_to].cluster);
      ++eq_clusters_count;
    } else {
      eq_to = i;
      first_added = false;
    }
  }
  return eq_clusters_count;
}


const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoKey;
const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoValue;
const JSObjectsRetainerTreeConfig::Key JSObjectsRetainerTreeConfig::kNoKey;
const JSObjectsRetainerTreeConfig::Value JSObjectsRetainerTreeConfig::kNoValue =
    NULL;


RetainerHeapProfile::RetainerHeapProfile()
    : zscope_(DELETE_ON_EXIT) {
  JSObjectsCluster roots(JSObjectsCluster::ROOTS);
  ReferencesExtractor extractor(roots, this);
  Heap::IterateRoots(&extractor, VISIT_ONLY_STRONG);
}


void RetainerHeapProfile::StoreReference(const JSObjectsCluster& cluster,
                                         HeapObject* ref) {
  JSObjectsCluster ref_cluster = Clusterizer::Clusterize(ref);
  if (ref_cluster.is_null()) return;
  JSObjectsRetainerTree::Locator ref_loc;
  if (retainers_tree_.Insert(ref_cluster, &ref_loc)) {
    ref_loc.set_value(new JSObjectsClusterTree());
  }
  JSObjectsClusterTree* referenced_by = ref_loc.value();
  Clusterizer::InsertReferenceIntoTree(referenced_by, cluster);
}


void RetainerHeapProfile::CollectStats(HeapObject* obj) {
  const JSObjectsCluster cluster = Clusterizer::Clusterize(obj);
  if (cluster.is_null()) return;
  ReferencesExtractor extractor(cluster, this);
  obj->Iterate(&extractor);
}


void RetainerHeapProfile::DebugPrintStats(
    RetainerHeapProfile::Printer* printer) {
  coarser_.Process(&retainers_tree_);
  // Print clusters that have no equivalents, aggregating their retainers.
  AggregatingRetainerTreePrinter agg_printer(&coarser_, printer);
  retainers_tree_.ForEach(&agg_printer);
  // Now aggregate clusters that have equivalents...
  RetainerTreeAggregator aggregator(&coarser_);
  aggregator.Process(&retainers_tree_);
  // ...and print them.
  SimpleRetainerTreePrinter s_printer(printer);
  aggregator.output_tree().ForEach(&s_printer);
}


void RetainerHeapProfile::PrintStats() {
  RetainersPrinter printer;
  DebugPrintStats(&printer);
}


//
// HeapProfiler class implementation.
//
void HeapProfiler::CollectStats(HeapObject* obj, HistogramInfo* info) {
  InstanceType type = obj->map()->instance_type();
  ASSERT(0 <= type && type <= LAST_TYPE);
  if (!FreeListNode::IsFreeListNode(obj)) {
    info[type].increment_number(1);
    info[type].increment_bytes(obj->Size());
  }
}


static void StackWeakReferenceCallback(Persistent<Value> object,
                                       void* trace) {
  DeleteArray(static_cast<Address*>(trace));
  object.Dispose();
}


static void PrintProducerStackTrace(Object* obj, void* trace) {
  if (!obj->IsJSObject()) return;
  String* constructor = JSObject::cast(obj)->constructor_name();
  SmartPointer<char> s_name(
      constructor->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
  LOG(HeapSampleJSProducerEvent(GetConstructorName(*s_name),
                                reinterpret_cast<Address*>(trace)));
}


void HeapProfiler::WriteSample() {
  LOG(HeapSampleBeginEvent("Heap", "allocated"));
  LOG(HeapSampleStats(
      "Heap", "allocated", Heap::CommittedMemory(), Heap::SizeOfObjects()));

  HistogramInfo info[LAST_TYPE+1];
#define DEF_TYPE_NAME(name) info[name].set_name(#name);
  INSTANCE_TYPE_LIST(DEF_TYPE_NAME)
#undef DEF_TYPE_NAME

  ConstructorHeapProfile js_cons_profile;
  RetainerHeapProfile js_retainer_profile;
  HeapIterator iterator;
  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
    CollectStats(obj, info);
    js_cons_profile.CollectStats(obj);
    js_retainer_profile.CollectStats(obj);
  }

  // Lump all the string types together.
  int string_number = 0;
  int string_bytes = 0;
#define INCREMENT_SIZE(type, size, name, camel_name)   \
  string_number += info[type].number();                \
  string_bytes += info[type].bytes();
  STRING_TYPE_LIST(INCREMENT_SIZE)
#undef INCREMENT_SIZE
  if (string_bytes > 0) {
    LOG(HeapSampleItemEvent("STRING_TYPE", string_number, string_bytes));
  }

  for (int i = FIRST_NONSTRING_TYPE; i <= LAST_TYPE; ++i) {
    if (info[i].bytes() > 0) {
      LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
                              info[i].bytes()));
    }
  }

  js_cons_profile.PrintStats();
  js_retainer_profile.PrintStats();

  GlobalHandles::IterateWeakRoots(PrintProducerStackTrace,
                                  StackWeakReferenceCallback);

  LOG(HeapSampleEndEvent("Heap", "allocated"));
}


bool ProducerHeapProfile::can_log_ = false;

void ProducerHeapProfile::Setup() {
  can_log_ = true;
}

void ProducerHeapProfile::DoRecordJSObjectAllocation(Object* obj) {
  ASSERT(FLAG_log_producers);
  if (!can_log_) return;
  int framesCount = 0;
  for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
    ++framesCount;
  }
  if (framesCount == 0) return;
  ++framesCount;  // Reserve place for the terminator item.
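  // Record the PC of every JavaScript frame; the extra slot is filled with
  // NULL below to terminate the trace.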
  Vector<Address> stack(NewArray<Address>(framesCount), framesCount);
  int i = 0;
  for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
    stack[i++] = it.frame()->pc();
  }
  stack[i] = NULL;
  Handle<Object> handle = GlobalHandles::Create(obj);
  GlobalHandles::MakeWeak(handle.location(),
                          static_cast<void*>(stack.start()),
                          StackWeakReferenceCallback);
}


#endif  // ENABLE_LOGGING_AND_PROFILING


} }  // namespace v8::internal