1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/v8.h"
6
7#include "src/profile-generator-inl.h"
8
9#include "src/compiler.h"
10#include "src/debug.h"
11#include "src/sampler.h"
12#include "src/global-handles.h"
13#include "src/scopeinfo.h"
14#include "src/unicode.h"
15#include "src/zone-inl.h"
16
17namespace v8 {
18namespace internal {
19
20
21bool StringsStorage::StringsMatch(void* key1, void* key2) {
22  return strcmp(reinterpret_cast<char*>(key1),
23                reinterpret_cast<char*>(key2)) == 0;
24}
25
26
// Seeds the string hasher with the heap's hash seed and installs the
// string-equality predicate on the backing HashMap.
StringsStorage::StringsStorage(Heap* heap)
    : hash_seed_(heap->HashSeed()), names_(StringsMatch) {
}
30
31
32StringsStorage::~StringsStorage() {
33  for (HashMap::Entry* p = names_.Start();
34       p != NULL;
35       p = names_.Next(p)) {
36    DeleteArray(reinterpret_cast<const char*>(p->value));
37  }
38}
39
40
41const char* StringsStorage::GetCopy(const char* src) {
42  int len = static_cast<int>(strlen(src));
43  HashMap::Entry* entry = GetEntry(src, len);
44  if (entry->value == NULL) {
45    Vector<char> dst = Vector<char>::New(len + 1);
46    StrNCpy(dst, src, len);
47    dst[len] = '\0';
48    entry->key = dst.start();
49    entry->value = entry->key;
50  }
51  return reinterpret_cast<const char*>(entry->value);
52}
53
54
55const char* StringsStorage::GetFormatted(const char* format, ...) {
56  va_list args;
57  va_start(args, format);
58  const char* result = GetVFormatted(format, args);
59  va_end(args);
60  return result;
61}
62
63
64const char* StringsStorage::AddOrDisposeString(char* str, int len) {
65  HashMap::Entry* entry = GetEntry(str, len);
66  if (entry->value == NULL) {
67    // New entry added.
68    entry->key = str;
69    entry->value = str;
70  } else {
71    DeleteArray(str);
72  }
73  return reinterpret_cast<const char*>(entry->value);
74}
75
76
77const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
78  Vector<char> str = Vector<char>::New(1024);
79  int len = VSNPrintF(str, format, args);
80  if (len == -1) {
81    DeleteArray(str.start());
82    return GetCopy(format);
83  }
84  return AddOrDisposeString(str.start(), len);
85}
86
87
88const char* StringsStorage::GetName(Name* name) {
89  if (name->IsString()) {
90    String* str = String::cast(name);
91    int length = Min(kMaxNameSize, str->length());
92    int actual_length = 0;
93    SmartArrayPointer<char> data =
94        str->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length,
95                       &actual_length);
96    return AddOrDisposeString(data.Detach(), actual_length);
97  } else if (name->IsSymbol()) {
98    return "<symbol>";
99  }
100  return "";
101}
102
103
// Interns the decimal string form of |index|.
const char* StringsStorage::GetName(int index) {
  return GetFormatted("%d", index);
}
107
108
// Like GetName(Name*), but empty names are replaced by the canonical
// "(anonymous function)" label.
const char* StringsStorage::GetFunctionName(Name* name) {
  return BeautifyFunctionName(GetName(name));
}
112
113
// Interns |name| and replaces an empty result with the canonical
// "(anonymous function)" label.
const char* StringsStorage::GetFunctionName(const char* name) {
  return BeautifyFunctionName(GetCopy(name));
}
117
118
119const char* StringsStorage::BeautifyFunctionName(const char* name) {
120  return (*name == 0) ? ProfileGenerator::kAnonymousFunctionName : name;
121}
122
123
124size_t StringsStorage::GetUsedMemorySize() const {
125  size_t size = sizeof(*this);
126  size += sizeof(HashMap::Entry) * names_.capacity();
127  for (HashMap::Entry* p = names_.Start(); p != NULL; p = names_.Next(p)) {
128    size += strlen(reinterpret_cast<const char*>(p->value)) + 1;
129  }
130  return size;
131}
132
133
// Looks up (inserting if absent) the hash-map slot for the first |len|
// characters of |str|, using the heap-seeded string hash.
HashMap::Entry* StringsStorage::GetEntry(const char* str, int len) {
  uint32_t hash = StringHasher::HashSequentialString(str, len, hash_seed_);
  return names_.Lookup(const_cast<char*>(str), hash, true);
}
138
139
// Shared empty-string defaults for optional CodeEntry fields.
const char* const CodeEntry::kEmptyNamePrefix = "";
const char* const CodeEntry::kEmptyResourceName = "";
const char* const CodeEntry::kEmptyBailoutReason = "";
143
144
// Frees the optional no-frame-ranges list; deleting NULL is a no-op.
CodeEntry::~CodeEntry() {
  delete no_frame_ranges_;
}
148
149
// Computes a hash identifying the logical function behind this entry;
// entries considered equal by IsSameAs() must hash identically.
uint32_t CodeEntry::GetCallUid() const {
  uint32_t hash = ComputeIntegerHash(tag_, v8::internal::kZeroHashSeed);
  if (shared_id_ != 0) {
    // A non-zero shared function id alone identifies the function.
    hash ^= ComputeIntegerHash(static_cast<uint32_t>(shared_id_),
                               v8::internal::kZeroHashSeed);
  } else {
    // Otherwise mix in the identifying fields. Hashing the string
    // pointers (not contents) mirrors the pointer comparisons in
    // IsSameAs(); names are interned via StringsStorage.
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(line_number_, v8::internal::kZeroHashSeed);
  }
  return hash;
}
169
170
171bool CodeEntry::IsSameAs(CodeEntry* entry) const {
172  return this == entry
173      || (tag_ == entry->tag_
174          && shared_id_ == entry->shared_id_
175          && (shared_id_ != 0
176              || (name_prefix_ == entry->name_prefix_
177                  && name_ == entry->name_
178                  && resource_name_ == entry->resource_name_
179                  && line_number_ == entry->line_number_)));
180}
181
182
// Re-tags this entry as a builtin and records which builtin it is.
void CodeEntry::SetBuiltinId(Builtins::Name id) {
  tag_ = Logger::BUILTIN_TAG;
  builtin_id_ = id;
}
187
188
189ProfileNode* ProfileNode::FindChild(CodeEntry* entry) {
190  HashMap::Entry* map_entry =
191      children_.Lookup(entry, CodeEntryHash(entry), false);
192  return map_entry != NULL ?
193      reinterpret_cast<ProfileNode*>(map_entry->value) : NULL;
194}
195
196
197ProfileNode* ProfileNode::FindOrAddChild(CodeEntry* entry) {
198  HashMap::Entry* map_entry =
199      children_.Lookup(entry, CodeEntryHash(entry), true);
200  if (map_entry->value == NULL) {
201    // New node added.
202    ProfileNode* new_node = new ProfileNode(tree_, entry);
203    map_entry->value = new_node;
204    children_list_.Add(new_node);
205  }
206  return reinterpret_cast<ProfileNode*>(map_entry->value);
207}
208
209
// Prints this node (self ticks, indented name, script id, node id,
// bailout reason, and optional resource:line), then all children with
// two extra columns of indentation.
void ProfileNode::Print(int indent) {
  OS::Print("%5u %*s %s%s %d #%d %s",
            self_ticks_,
            indent, "",
            entry_->name_prefix(),
            entry_->name(),
            entry_->script_id(),
            id(),
            entry_->bailout_reason());
  if (entry_->resource_name()[0] != '\0')
    OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
  OS::Print("\n");
  // Children are visited in hash-map iteration order.
  for (HashMap::Entry* p = children_.Start();
       p != NULL;
       p = children_.Next(p)) {
    reinterpret_cast<ProfileNode*>(p->value)->Print(indent + 2);
  }
}
228
229
// TraverseDepthFirst callback that deletes each node once all of its
// children have been visited (post-order), used to tear down a tree.
class DeleteNodesCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  // Safe to delete here: the node's children were already destroyed.
  void AfterAllChildrenTraversed(ProfileNode* node) {
    delete node;
  }

  void AfterChildTraversed(ProfileNode*, ProfileNode*) { }
};
240
241
// The tree owns a synthetic "(root)" entry so that every recorded path
// has a common ancestor node.
ProfileTree::ProfileTree()
    : root_entry_(Logger::FUNCTION_TAG, "(root)"),
      next_node_id_(1),
      root_(new ProfileNode(this, &root_entry_)) {
}
247
248
// Deletes every node, including root_, via a post-order traversal.
ProfileTree::~ProfileTree() {
  DeleteNodesCallback cb;
  TraverseDepthFirst(&cb);
}
253
254
255ProfileNode* ProfileTree::AddPathFromEnd(const Vector<CodeEntry*>& path) {
256  ProfileNode* node = root_;
257  for (CodeEntry** entry = path.start() + path.length() - 1;
258       entry != path.start() - 1;
259       --entry) {
260    if (*entry != NULL) {
261      node = node->FindOrAddChild(*entry);
262    }
263  }
264  node->IncrementSelfTicks();
265  return node;
266}
267
268
269void ProfileTree::AddPathFromStart(const Vector<CodeEntry*>& path) {
270  ProfileNode* node = root_;
271  for (CodeEntry** entry = path.start();
272       entry != path.start() + path.length();
273       ++entry) {
274    if (*entry != NULL) {
275      node = node->FindOrAddChild(*entry);
276    }
277  }
278  node->IncrementSelfTicks();
279}
280
281
// Simple (source, destination) ProfileNode pair.
// NOTE(review): not referenced anywhere in this chunk — possibly a
// leftover; confirm it is unused elsewhere before removing.
struct NodesPair {
  NodesPair(ProfileNode* src, ProfileNode* dst)
      : src(src), dst(dst) { }
  ProfileNode* src;
  ProfileNode* dst;
};
288
289
// Cursor over one node's children; the iterative DFS below keeps one
// Position per tree level to remember how far traversal has advanced.
class Position {
 public:
  explicit Position(ProfileNode* node)
      : node(node), child_idx_(0) { }
  // Child currently pointed at; only valid if has_current_child().
  INLINE(ProfileNode* current_child()) {
    return node->children()->at(child_idx_);
  }
  INLINE(bool has_current_child()) {
    return child_idx_ < node->children()->length();
  }
  INLINE(void next_child()) { ++child_idx_; }

  ProfileNode* node;
 private:
  int child_idx_;
};
306
307
308// Non-recursive implementation of a depth-first post-order tree traversal.
// Non-recursive implementation of a depth-first post-order tree traversal.
// The explicit stack holds one Position (node + child cursor) per level.
template <typename Callback>
void ProfileTree::TraverseDepthFirst(Callback* callback) {
  List<Position> stack(10);
  stack.Add(Position(root_));
  while (stack.length() > 0) {
    Position& current = stack.last();
    if (current.has_current_child()) {
      // Descend into the next unvisited child. Note: stack.Add() may
      // relocate the list storage, so |current| must not be used after.
      callback->BeforeTraversingChild(current.node, current.current_child());
      stack.Add(Position(current.current_child()));
    } else {
      // All children done: post-order visit, then advance the parent's
      // cursor past this child.
      callback->AfterAllChildrenTraversed(current.node);
      if (stack.length() > 1) {
        Position& parent = stack[stack.length() - 2];
        callback->AfterChildTraversed(parent.node, current.node);
        parent.next_child();
      }
      // Remove child from the stack.
      stack.RemoveLast();
    }
  }
}
330
331
// Records the title and the profile's start time up front; individual
// samples are only retained when |record_samples| is set (see AddPath).
CpuProfile::CpuProfile(const char* title, bool record_samples)
    : title_(title),
      record_samples_(record_samples),
      start_time_(TimeTicks::HighResolutionNow()) {
}
337
338
339void CpuProfile::AddPath(TimeTicks timestamp, const Vector<CodeEntry*>& path) {
340  ProfileNode* top_frame_node = top_down_.AddPathFromEnd(path);
341  if (record_samples_) {
342    timestamps_.Add(timestamp);
343    samples_.Add(top_frame_node);
344  }
345}
346
347
// Stamps the profile's end time when profiling stops. Despite the
// name, no tick totals or rates are computed here.
void CpuProfile::CalculateTotalTicksAndSamplingRate() {
  end_time_ = TimeTicks::HighResolutionNow();
}
351
352
// Debug dump of the whole top-down call tree to stdout.
void CpuProfile::Print() {
  OS::Print("[Top down]:\n");
  top_down_.Print();
}
357
358
// Sentinel entry marking shared-function-info placeholders in the tree
// (see GetSharedId), and the tree's "no key" value.
CodeEntry* const CodeMap::kSharedFunctionCodeEntry = NULL;
const CodeMap::CodeTreeConfig::Key CodeMap::CodeTreeConfig::kNoKey = NULL;
361
362
// Registers a code object occupying [addr, addr + size). Previously
// known code overlapping that range is evicted first, keeping the
// recorded ranges disjoint.
void CodeMap::AddCode(Address addr, CodeEntry* entry, unsigned size) {
  DeleteAllCoveredCode(addr, addr + size);
  CodeTree::Locator locator;
  tree_.Insert(addr, &locator);
  locator.set_value(CodeEntryInfo(entry, size));
}
369
370
// Removes every recorded code range that overlaps [start, end). Keys
// are collected first and removed afterwards, so the tree is not
// mutated while it is being searched.
void CodeMap::DeleteAllCoveredCode(Address start, Address end) {
  List<Address> to_delete;
  Address addr = end - 1;
  // Walk ranges from the last one starting before |end| back toward
  // |start|.
  while (addr >= start) {
    CodeTree::Locator locator;
    if (!tree_.FindGreatestLessThan(addr, &locator)) break;
    Address start2 = locator.key(), end2 = start2 + locator.value().size;
    // Half-open interval overlap test of [start2, end2) vs [start, end).
    if (start2 < end && start < end2) to_delete.Add(start2);
    addr = start2 - 1;
  }
  for (int i = 0; i < to_delete.length(); ++i) tree_.Remove(to_delete[i]);
}
383
384
385CodeEntry* CodeMap::FindEntry(Address addr, Address* start) {
386  CodeTree::Locator locator;
387  if (tree_.FindGreatestLessThan(addr, &locator)) {
388    // locator.key() <= addr. Need to check that addr is within entry.
389    const CodeEntryInfo& entry = locator.value();
390    if (addr < (locator.key() + entry.size)) {
391      if (start) {
392        *start = locator.key();
393      }
394      return entry.entry;
395    }
396  }
397  return NULL;
398}
399
400
// Returns a small integer id for the shared function info at |addr|,
// allocating the next id the first time the address is seen.
int CodeMap::GetSharedId(Address addr) {
  CodeTree::Locator locator;
  // For shared function entries, 'size' field is used to store their IDs.
  if (tree_.Find(addr, &locator)) {
    const CodeEntryInfo& entry = locator.value();
    ASSERT(entry.entry == kSharedFunctionCodeEntry);
    return entry.size;
  } else {
    // First occurrence: insert a placeholder entry carrying the new id.
    tree_.Insert(addr, &locator);
    int id = next_shared_id_++;
    locator.set_value(CodeEntryInfo(kSharedFunctionCodeEntry, id));
    return id;
  }
}
415
416
417void CodeMap::MoveCode(Address from, Address to) {
418  if (from == to) return;
419  CodeTree::Locator locator;
420  if (!tree_.Find(from, &locator)) return;
421  CodeEntryInfo entry = locator.value();
422  tree_.Remove(from);
423  AddCode(to, entry.entry, entry.size);
424}
425
426
// Prints one recorded range: its address, then either the shared
// function id or the entry's size and name.
void CodeMap::CodeTreePrinter::Call(
    const Address& key, const CodeMap::CodeEntryInfo& value) {
  // For shared function entries, 'size' field is used to store their IDs.
  if (value.entry == kSharedFunctionCodeEntry) {
    OS::Print("%p SharedFunctionInfo %d\n", key, value.size);
  } else {
    OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
  }
}
436
437
// Debug dump of the whole code map, one line per recorded range.
void CodeMap::Print() {
  CodeTreePrinter printer;
  tree_.ForEach(&printer);
}
442
443
// The semaphore is initialized to 1 and used as a binary lock guarding
// current_profiles_ (see the Wait/Signal pairs below).
CpuProfilesCollection::CpuProfilesCollection(Heap* heap)
    : function_and_resource_names_(heap),
      current_profiles_semaphore_(1) {
}
448
449
450static void DeleteCodeEntry(CodeEntry** entry_ptr) {
451  delete *entry_ptr;
452}
453
454
455static void DeleteCpuProfile(CpuProfile** profile_ptr) {
456  delete *profile_ptr;
457}
458
459
// The collection owns all profiles (running and finished) and every
// CodeEntry handed out via NewCodeEntry(); free them all here.
CpuProfilesCollection::~CpuProfilesCollection() {
  finished_profiles_.Iterate(DeleteCpuProfile);
  current_profiles_.Iterate(DeleteCpuProfile);
  code_entries_.Iterate(DeleteCodeEntry);
}
465
466
// Starts a new profile named |title|. Returns false (without starting
// anything) when the simultaneous-profile limit is reached or a profile
// with the same title is already running. The semaphore guards
// current_profiles_ against the sampler thread.
bool CpuProfilesCollection::StartProfiling(const char* title,
                                           bool record_samples) {
  current_profiles_semaphore_.Wait();
  if (current_profiles_.length() >= kMaxSimultaneousProfiles) {
    current_profiles_semaphore_.Signal();
    return false;
  }
  for (int i = 0; i < current_profiles_.length(); ++i) {
    if (strcmp(current_profiles_[i]->title(), title) == 0) {
      // Ignore attempts to start profile with the same title.
      current_profiles_semaphore_.Signal();
      return false;
    }
  }
  current_profiles_.Add(new CpuProfile(title, record_samples));
  current_profiles_semaphore_.Signal();
  return true;
}
485
486
// Stops and returns the most recently started profile whose title
// matches |title| (an empty title matches any profile); returns NULL
// on no match. The stopped profile gets its end time stamped and is
// moved to finished_profiles_.
CpuProfile* CpuProfilesCollection::StopProfiling(const char* title) {
  const int title_len = StrLength(title);
  CpuProfile* profile = NULL;
  current_profiles_semaphore_.Wait();
  // Scan newest-first so nested profiles stop in LIFO order.
  for (int i = current_profiles_.length() - 1; i >= 0; --i) {
    if (title_len == 0 || strcmp(current_profiles_[i]->title(), title) == 0) {
      profile = current_profiles_.Remove(i);
      break;
    }
  }
  current_profiles_semaphore_.Signal();

  if (profile == NULL) return NULL;
  profile->CalculateTotalTicksAndSamplingRate();
  finished_profiles_.Add(profile);
  return profile;
}
504
505
506bool CpuProfilesCollection::IsLastProfile(const char* title) {
507  // Called from VM thread, and only it can mutate the list,
508  // so no locking is needed here.
509  if (current_profiles_.length() != 1) return false;
510  return StrLength(title) == 0
511      || strcmp(current_profiles_[0]->title(), title) == 0;
512}
513
514
515void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
516  // Called from VM thread for a completed profile.
517  for (int i = 0; i < finished_profiles_.length(); i++) {
518    if (profile == finished_profiles_[i]) {
519      finished_profiles_.Remove(i);
520      return;
521    }
522  }
523  UNREACHABLE();
524}
525
526
// Appends one sampled |path| to every profile currently recording.
// Runs on the sampling path, hence the semaphore-guarded section.
void CpuProfilesCollection::AddPathToCurrentProfiles(
    TimeTicks timestamp, const Vector<CodeEntry*>& path) {
  // As starting / stopping profiles is rare relatively to this
  // method, we don't bother minimizing the duration of lock holding,
  // e.g. copying contents of the list to a local vector.
  current_profiles_semaphore_.Wait();
  for (int i = 0; i < current_profiles_.length(); ++i) {
    current_profiles_[i]->AddPath(timestamp, path);
  }
  current_profiles_semaphore_.Signal();
}
538
539
540CodeEntry* CpuProfilesCollection::NewCodeEntry(
541      Logger::LogEventsAndTags tag,
542      const char* name,
543      const char* name_prefix,
544      const char* resource_name,
545      int line_number,
546      int column_number) {
547  CodeEntry* code_entry = new CodeEntry(tag,
548                                        name,
549                                        name_prefix,
550                                        resource_name,
551                                        line_number,
552                                        column_number);
553  code_entries_.Add(code_entry);
554  return code_entry;
555}
556
557
// Human-readable labels for the synthetic profile entries.
const char* const ProfileGenerator::kAnonymousFunctionName =
    "(anonymous function)";
const char* const ProfileGenerator::kProgramEntryName =
    "(program)";
const char* const ProfileGenerator::kIdleEntryName =
    "(idle)";
const char* const ProfileGenerator::kGarbageCollectorEntryName =
    "(garbage collector)";
const char* const ProfileGenerator::kUnresolvedFunctionName =
    "(unresolved function)";
568
569
// Pre-creates the synthetic entries used for VM states and unresolved
// frames; like all entries from NewCodeEntry they are owned by
// |profiles|.
ProfileGenerator::ProfileGenerator(CpuProfilesCollection* profiles)
    : profiles_(profiles),
      program_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG, kProgramEntryName)),
      idle_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG, kIdleEntryName)),
      gc_entry_(
          profiles->NewCodeEntry(Logger::BUILTIN_TAG,
                                 kGarbageCollectorEntryName)),
      unresolved_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG,
                                 kUnresolvedFunctionName)) {
}
583
584
// Symbolizes one sampler tick into a top-frame-first entry path and
// feeds it to all running profiles. Unsymbolized slots stay NULL and
// are skipped when the path is added to the tree.
void ProfileGenerator::RecordTickSample(const TickSample& sample) {
  // Allocate space for stack frames + pc + function + vm-state.
  ScopedVector<CodeEntry*> entries(sample.frames_count + 3);
  // As actual number of decoded code entries may vary, initialize
  // entries vector with NULL values.
  CodeEntry** entry = entries.start();
  memset(entry, 0, entries.length() * sizeof(*entry));
  if (sample.pc != NULL) {
    if (sample.has_external_callback && sample.state == EXTERNAL &&
        sample.top_frame_type == StackFrame::EXIT) {
      // Don't use PC when in external callback code, as it can point
      // inside callback's code, and we will erroneously report
      // that a callback calls itself.
      *entry++ = code_map_.FindEntry(sample.external_callback);
    } else {
      Address start;
      CodeEntry* pc_entry = code_map_.FindEntry(sample.pc, &start);
      // If pc is in the function code before it set up stack frame or after the
      // frame was destroyed SafeStackFrameIterator incorrectly thinks that
      // ebp contains return address of the current function and skips caller's
      // frame. Check for this case and just skip such samples.
      if (pc_entry) {
        List<OffsetRange>* ranges = pc_entry->no_frame_ranges();
        if (ranges) {
          Code* code = Code::cast(HeapObject::FromAddress(start));
          int pc_offset = static_cast<int>(
              sample.pc - code->instruction_start());
          for (int i = 0; i < ranges->length(); i++) {
            OffsetRange& range = ranges->at(i);
            if (range.from <= pc_offset && pc_offset < range.to) {
              // PC fell inside a no-frame range: drop the whole sample.
              return;
            }
          }
        }
        *entry++ = pc_entry;

        if (pc_entry->builtin_id() == Builtins::kFunctionCall ||
            pc_entry->builtin_id() == Builtins::kFunctionApply) {
          // When current function is FunctionCall or FunctionApply builtin the
          // top frame is either frame of the calling JS function or internal
          // frame. In the latter case we know the caller for sure but in the
          // former case we don't so we simply replace the frame with
          // 'unresolved' entry.
          if (sample.top_frame_type == StackFrame::JAVA_SCRIPT) {
            *entry++ = unresolved_entry_;
          }
        }
      }
    }

    // Symbolize the captured stack addresses, top to bottom.
    for (const Address* stack_pos = sample.stack,
           *stack_end = stack_pos + sample.frames_count;
         stack_pos != stack_end;
         ++stack_pos) {
      *entry++ = code_map_.FindEntry(*stack_pos);
    }
  }

  if (FLAG_prof_browser_mode) {
    bool no_symbolized_entries = true;
    for (CodeEntry** e = entries.start(); e != entry; ++e) {
      if (*e != NULL) {
        no_symbolized_entries = false;
        break;
      }
    }
    // If no frames were symbolized, put the VM state entry in.
    if (no_symbolized_entries) {
      *entry++ = EntryForVMState(sample.state);
    }
  }

  profiles_->AddPathToCurrentProfiles(sample.timestamp, entries);
}
659
660
661CodeEntry* ProfileGenerator::EntryForVMState(StateTag tag) {
662  switch (tag) {
663    case GC:
664      return gc_entry_;
665    case JS:
666    case COMPILER:
667    // DOM events handlers are reported as OTHER / EXTERNAL entries.
668    // To avoid confusing people, let's put all these entries into
669    // one bucket.
670    case OTHER:
671    case EXTERNAL:
672      return program_entry_;
673    case IDLE:
674      return idle_entry_;
675    default: return NULL;
676  }
677}
678
679} }  // namespace v8::internal
680