1// Copyright 2012 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#include "v8.h"
29
30#include "profile-generator-inl.h"
31
32#include "compiler.h"
33#include "debug.h"
34#include "sampler.h"
35#include "global-handles.h"
36#include "scopeinfo.h"
37#include "unicode.h"
38#include "zone-inl.h"
39
40namespace v8 {
41namespace internal {
42
43
// Initializes the string-interning table, keyed by the stored C strings
// and using StringsMatch as the equality predicate.
StringsStorage::StringsStorage()
    : names_(StringsMatch) {
}
47
48
49StringsStorage::~StringsStorage() {
50  for (HashMap::Entry* p = names_.Start();
51       p != NULL;
52       p = names_.Next(p)) {
53    DeleteArray(reinterpret_cast<const char*>(p->value));
54  }
55}
56
57
58const char* StringsStorage::GetCopy(const char* src) {
59  int len = static_cast<int>(strlen(src));
60  Vector<char> dst = Vector<char>::New(len + 1);
61  OS::StrNCpy(dst, src, len);
62  dst[len] = '\0';
63  uint32_t hash =
64      StringHasher::HashSequentialString(dst.start(), len, HEAP->HashSeed());
65  return AddOrDisposeString(dst.start(), hash);
66}
67
68
// printf-style convenience wrapper around GetVFormatted; returns an
// interned formatted string.
const char* StringsStorage::GetFormatted(const char* format, ...) {
  va_list args;
  va_start(args, format);
  const char* result = GetVFormatted(format, args);
  va_end(args);
  return result;
}
76
77
78const char* StringsStorage::AddOrDisposeString(char* str, uint32_t hash) {
79  HashMap::Entry* cache_entry = names_.Lookup(str, hash, true);
80  if (cache_entry->value == NULL) {
81    // New entry added.
82    cache_entry->value = str;
83  } else {
84    DeleteArray(str);
85  }
86  return reinterpret_cast<const char*>(cache_entry->value);
87}
88
89
// Formats into a fixed 1024-byte buffer and interns the result.
const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
  Vector<char> str = Vector<char>::New(1024);
  int len = OS::VSNPrintF(str, format, args);
  if (len == -1) {
    // Formatting failed (or output did not fit): fall back to returning
    // the format string itself.  NOTE(review): this pointer is NOT
    // interned in names_, so its lifetime is the caller's concern.
    DeleteArray(str.start());
    return format;
  }
  uint32_t hash = StringHasher::HashSequentialString(
      str.start(), len, HEAP->HashSeed());
  return AddOrDisposeString(str.start(), hash);
}
101
102
// Returns an interned, flattened copy of a heap Name.  Strings are
// truncated to kMaxNameSize characters; symbols have no string form and
// map to a fixed placeholder; anything else yields "".
const char* StringsStorage::GetName(Name* name) {
  if (name->IsString()) {
    String* str = String::cast(name);
    int length = Min(kMaxNameSize, str->length());
    SmartArrayPointer<char> data =
        str->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length);
    uint32_t hash = StringHasher::HashSequentialString(
        *data, length, name->GetHeap()->HashSeed());
    // Detach() releases the buffer from the smart pointer so that
    // AddOrDisposeString can take (or dispose of) ownership.
    return AddOrDisposeString(data.Detach(), hash);
  } else if (name->IsSymbol()) {
    return "<symbol>";
  }
  return "";
}
117
118
// Returns the interned decimal representation of index.
const char* StringsStorage::GetName(int index) {
  return GetFormatted("%d", index);
}
122
123
124size_t StringsStorage::GetUsedMemorySize() const {
125  size_t size = sizeof(*this);
126  size += sizeof(HashMap::Entry) * names_.capacity();
127  for (HashMap::Entry* p = names_.Start(); p != NULL; p = names_.Next(p)) {
128    size += strlen(reinterpret_cast<const char*>(p->value)) + 1;
129  }
130  return size;
131}
132
133
// Defaults used when a code entry has no name prefix / resource name.
const char* const CodeEntry::kEmptyNamePrefix = "";
const char* const CodeEntry::kEmptyResourceName = "";
136
137
CodeEntry::~CodeEntry() {
  // Only the no-frame range list is owned here; the name strings are
  // not freed (presumably owned by StringsStorage — see callers).
  delete no_frame_ranges_;
}
141
142
// Copies the identifying fields from source.  shared_id_,
// no_frame_ranges_ and builtin_id_ are not copied.
void CodeEntry::CopyData(const CodeEntry& source) {
  tag_ = source.tag_;
  name_prefix_ = source.name_prefix_;
  name_ = source.name_;
  resource_name_ = source.resource_name_;
  line_number_ = source.line_number_;
}
150
151
152uint32_t CodeEntry::GetCallUid() const {
153  uint32_t hash = ComputeIntegerHash(tag_, v8::internal::kZeroHashSeed);
154  if (shared_id_ != 0) {
155    hash ^= ComputeIntegerHash(static_cast<uint32_t>(shared_id_),
156                               v8::internal::kZeroHashSeed);
157  } else {
158    hash ^= ComputeIntegerHash(
159        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)),
160        v8::internal::kZeroHashSeed);
161    hash ^= ComputeIntegerHash(
162        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)),
163        v8::internal::kZeroHashSeed);
164    hash ^= ComputeIntegerHash(
165        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)),
166        v8::internal::kZeroHashSeed);
167    hash ^= ComputeIntegerHash(line_number_, v8::internal::kZeroHashSeed);
168  }
169  return hash;
170}
171
172
173bool CodeEntry::IsSameAs(CodeEntry* entry) const {
174  return this == entry
175      || (tag_ == entry->tag_
176          && shared_id_ == entry->shared_id_
177          && (shared_id_ != 0
178              || (name_prefix_ == entry->name_prefix_
179                  && name_ == entry->name_
180                  && resource_name_ == entry->resource_name_
181                  && line_number_ == entry->line_number_)));
182}
183
184
// Records the builtin id and forces the tag to BUILTIN_TAG.
void CodeEntry::SetBuiltinId(Builtins::Name id) {
  tag_ = Logger::BUILTIN_TAG;
  builtin_id_ = id;
}
189
190
191ProfileNode* ProfileNode::FindChild(CodeEntry* entry) {
192  HashMap::Entry* map_entry =
193      children_.Lookup(entry, CodeEntryHash(entry), false);
194  return map_entry != NULL ?
195      reinterpret_cast<ProfileNode*>(map_entry->value) : NULL;
196}
197
198
199ProfileNode* ProfileNode::FindOrAddChild(CodeEntry* entry) {
200  HashMap::Entry* map_entry =
201      children_.Lookup(entry, CodeEntryHash(entry), true);
202  if (map_entry->value == NULL) {
203    // New node added.
204    ProfileNode* new_node = new ProfileNode(tree_, entry);
205    map_entry->value = new_node;
206    children_list_.Add(new_node);
207  }
208  return reinterpret_cast<ProfileNode*>(map_entry->value);
209}
210
211
// Self ticks converted to milliseconds via the tree's tick rate.
double ProfileNode::GetSelfMillis() const {
  return tree_->TicksToMillis(self_ticks_);
}
215
216
// Total (self + descendants) ticks converted to milliseconds.
double ProfileNode::GetTotalMillis() const {
  return tree_->TicksToMillis(total_ticks_);
}
220
221
222void ProfileNode::Print(int indent) {
223  OS::Print("%5u %5u %*c %s%s %d #%d",
224            total_ticks_, self_ticks_,
225            indent, ' ',
226            entry_->name_prefix(),
227            entry_->name(),
228            entry_->script_id(),
229            id());
230  if (entry_->resource_name()[0] != '\0')
231    OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
232  OS::Print("\n");
233  for (HashMap::Entry* p = children_.Start();
234       p != NULL;
235       p = children_.Next(p)) {
236    reinterpret_cast<ProfileNode*>(p->value)->Print(indent + 2);
237  }
238}
239
240
// TraverseDepthFirst callback that deletes every node.  Post-order
// traversal guarantees children are freed before their parent.
class DeleteNodesCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  void AfterAllChildrenTraversed(ProfileNode* node) {
    delete node;
  }

  void AfterChildTraversed(ProfileNode*, ProfileNode*) { }
};
251
252
// Builds the tree around a synthetic "(root)" code entry; the node id
// counter starts at 1 (consumed by ProfileNode construction).
ProfileTree::ProfileTree()
    : root_entry_(Logger::FUNCTION_TAG, "(root)"),
      next_node_id_(1),
      root_(new ProfileNode(this, &root_entry_)) {
}
258
259
ProfileTree::~ProfileTree() {
  // Frees every node (including root_) via a post-order walk.
  DeleteNodesCallback cb;
  TraverseDepthFirst(&cb);
}
264
265
266ProfileNode* ProfileTree::AddPathFromEnd(const Vector<CodeEntry*>& path) {
267  ProfileNode* node = root_;
268  for (CodeEntry** entry = path.start() + path.length() - 1;
269       entry != path.start() - 1;
270       --entry) {
271    if (*entry != NULL) {
272      node = node->FindOrAddChild(*entry);
273    }
274  }
275  node->IncrementSelfTicks();
276  return node;
277}
278
279
280void ProfileTree::AddPathFromStart(const Vector<CodeEntry*>& path) {
281  ProfileNode* node = root_;
282  for (CodeEntry** entry = path.start();
283       entry != path.start() + path.length();
284       ++entry) {
285    if (*entry != NULL) {
286      node = node->FindOrAddChild(*entry);
287    }
288  }
289  node->IncrementSelfTicks();
290}
291
292
// Source/destination node pair.  NOTE(review): no use of this struct is
// visible in this file — candidate for removal if nothing else in the
// translation unit references it.
struct NodesPair {
  NodesPair(ProfileNode* src, ProfileNode* dst)
      : src(src), dst(dst) { }
  ProfileNode* src;
  ProfileNode* dst;
};
299
300
301void ProfileTree::SetTickRatePerMs(double ticks_per_ms) {
302  ms_to_ticks_scale_ = ticks_per_ms > 0 ? 1.0 / ticks_per_ms : 1.0;
303}
304
305
// Cursor over one node's children; used as a stack slot by
// ProfileTree::TraverseDepthFirst below.
class Position {
 public:
  explicit Position(ProfileNode* node)
      : node(node), child_idx_(0) { }
  // Child currently being visited.
  INLINE(ProfileNode* current_child()) {
    return node->children()->at(child_idx_);
  }
  // True while there are children left to visit.
  INLINE(bool has_current_child()) {
    return child_idx_ < node->children()->length();
  }
  // Advance to the next child.
  INLINE(void next_child()) { ++child_idx_; }

  ProfileNode* node;
 private:
  int child_idx_;
};
322
323
// Non-recursive implementation of a depth-first post-order tree traversal.
// The explicit stack of (node, next-child-index) cursors replaces the
// call stack; the callback receives pre-descent, post-order, and
// after-child notifications.
template <typename Callback>
void ProfileTree::TraverseDepthFirst(Callback* callback) {
  List<Position> stack(10);
  stack.Add(Position(root_));
  while (stack.length() > 0) {
    Position& current = stack.last();
    if (current.has_current_child()) {
      // Descend into the next unvisited child.
      callback->BeforeTraversingChild(current.node, current.current_child());
      stack.Add(Position(current.current_child()));
    } else {
      // All children done: post-order visit of the node itself.
      callback->AfterAllChildrenTraversed(current.node);
      if (stack.length() > 1) {
        Position& parent = stack[stack.length() - 2];
        callback->AfterChildTraversed(parent.node, current.node);
        parent.next_child();
      }
      // Remove child from the stack.
      stack.RemoveLast();
    }
  }
}
346
347
// TraverseDepthFirst callback that rolls up total_ticks post-order:
// each node's total becomes its self ticks plus all children's totals.
class CalculateTotalTicksCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  void AfterAllChildrenTraversed(ProfileNode* node) {
    node->IncreaseTotalTicks(node->self_ticks());
  }

  void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) {
    parent->IncreaseTotalTicks(child->total_ticks());
  }
};
360
361
// Computes total_ticks for every node in the tree (see the callback
// above for the rollup rule).
void ProfileTree::CalculateTotalTicks() {
  CalculateTotalTicksCallback cb;
  TraverseDepthFirst(&cb);
}
366
367
// One-line debug summary of the root node's tick counts and times.
void ProfileTree::ShortPrint() {
  OS::Print("root: %u %u %.2fms %.2fms\n",
            root_->total_ticks(), root_->self_ticks(),
            root_->GetTotalMillis(), root_->GetSelfMillis());
}
373
374
// Starts a profile: stamps the start time (microseconds, OS::Ticks).
// When record_samples is true, AddPath also remembers the top node of
// every sample in samples_.
CpuProfile::CpuProfile(const char* title, unsigned uid, bool record_samples)
    : title_(title),
      uid_(uid),
      record_samples_(record_samples),
      start_time_us_(OS::Ticks()),
      end_time_us_(0) {
}
382
383
// Records one sample: merges the path into the top-down tree and, when
// sample recording is enabled, remembers the node the tick landed on.
void CpuProfile::AddPath(const Vector<CodeEntry*>& path) {
  ProfileNode* top_frame_node = top_down_.AddPathFromEnd(path);
  if (record_samples_) samples_.Add(top_frame_node);
}
388
389
// Finalizes the profile: stamps the end time, rolls up total ticks, and
// derives the observed ticks-per-millisecond rate used for converting
// node tick counts to milliseconds.
void CpuProfile::CalculateTotalTicksAndSamplingRate() {
  end_time_us_ = OS::Ticks();
  top_down_.CalculateTotalTicks();

  double duration_ms = (end_time_us_ - start_time_us_) / 1000.;
  if (duration_ms < 1) duration_ms = 1;  // Guard against division by ~0.
  unsigned ticks = top_down_.root()->total_ticks();
  double rate = ticks / duration_ms;
  top_down_.SetTickRatePerMs(rate);
}
400
401
// One-line debug summary of the top-down tree.
void CpuProfile::ShortPrint() {
  OS::Print("top down ");
  top_down_.ShortPrint();
}
406
407
// Full debug dump of the top-down tree.
void CpuProfile::Print() {
  OS::Print("[Top down]:\n");
  top_down_.Print();
}
412
413
// Sentinel entry for shared function infos (whose CodeEntryInfo stores
// an id in 'size' — see GetSharedId), and the "no key" marker for the
// address-keyed tree.
CodeEntry* const CodeMap::kSharedFunctionCodeEntry = NULL;
const CodeMap::CodeTreeConfig::Key CodeMap::CodeTreeConfig::kNoKey = NULL;
416
417
// Maps [addr, addr+size) to entry.  Previously registered ranges that
// overlap the new one are removed first.
void CodeMap::AddCode(Address addr, CodeEntry* entry, unsigned size) {
  DeleteAllCoveredCode(addr, addr + size);
  CodeTree::Locator locator;
  tree_.Insert(addr, &locator);
  locator.set_value(CodeEntryInfo(entry, size));
}
424
425
// Removes every registered range that overlaps [start, end).  Walks the
// tree right-to-left starting from end-1; candidates are collected
// first and removed in a second pass so removal does not disturb the
// walk.
void CodeMap::DeleteAllCoveredCode(Address start, Address end) {
  List<Address> to_delete;
  Address addr = end - 1;
  while (addr >= start) {
    CodeTree::Locator locator;
    if (!tree_.FindGreatestLessThan(addr, &locator)) break;
    Address start2 = locator.key(), end2 = start2 + locator.value().size;
    // Standard half-open interval overlap test.
    if (start2 < end && start < end2) to_delete.Add(start2);
    addr = start2 - 1;
  }
  for (int i = 0; i < to_delete.length(); ++i) tree_.Remove(to_delete[i]);
}
438
439
440CodeEntry* CodeMap::FindEntry(Address addr, Address* start) {
441  CodeTree::Locator locator;
442  if (tree_.FindGreatestLessThan(addr, &locator)) {
443    // locator.key() <= addr. Need to check that addr is within entry.
444    const CodeEntryInfo& entry = locator.value();
445    if (addr < (locator.key() + entry.size)) {
446      if (start) {
447        *start = locator.key();
448      }
449      return entry.entry;
450    }
451  }
452  return NULL;
453}
454
455
456int CodeMap::GetSharedId(Address addr) {
457  CodeTree::Locator locator;
458  // For shared function entries, 'size' field is used to store their IDs.
459  if (tree_.Find(addr, &locator)) {
460    const CodeEntryInfo& entry = locator.value();
461    ASSERT(entry.entry == kSharedFunctionCodeEntry);
462    return entry.size;
463  } else {
464    tree_.Insert(addr, &locator);
465    int id = next_shared_id_++;
466    locator.set_value(CodeEntryInfo(kSharedFunctionCodeEntry, id));
467    return id;
468  }
469}
470
471
// Re-registers the range starting at 'from' under address 'to'.  No-op
// when 'from' equals 'to' or is unknown.  AddCode also evicts any
// ranges the destination overlaps.
void CodeMap::MoveCode(Address from, Address to) {
  if (from == to) return;
  CodeTree::Locator locator;
  if (!tree_.Find(from, &locator)) return;
  CodeEntryInfo entry = locator.value();
  tree_.Remove(from);
  AddCode(to, entry.entry, entry.size);
}
480
481
// Per-node visitor for CodeMap::Print: dumps one line per mapping.
void CodeMap::CodeTreePrinter::Call(
    const Address& key, const CodeMap::CodeEntryInfo& value) {
  // For shared function entries, 'size' field is used to store their IDs.
  if (value.entry == kSharedFunctionCodeEntry) {
    OS::Print("%p SharedFunctionInfo %d\n", key, value.size);
  } else {
    OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
  }
}
491
492
// Debug dump of every address-to-entry mapping.
void CodeMap::Print() {
  CodeTreePrinter printer;
  tree_.ForEach(&printer);
}
497
498
// The binary semaphore (initial count 1) acts as a mutex guarding
// current_profiles_ (see StartProfiling / StopProfiling /
// AddPathToCurrentProfiles).
CpuProfilesCollection::CpuProfilesCollection()
    : current_profiles_semaphore_(OS::CreateSemaphore(1)) {
}
502
503
// List<>::Iterate helper: frees one owned CodeEntry.
static void DeleteCodeEntry(CodeEntry** entry_ptr) {
  delete *entry_ptr;
}
507
508
// List<>::Iterate helper: frees one owned CpuProfile.
static void DeleteCpuProfile(CpuProfile** profile_ptr) {
  delete *profile_ptr;
}
512
513
CpuProfilesCollection::~CpuProfilesCollection() {
  delete current_profiles_semaphore_;
  // The collection owns every profile and code entry it handed out.
  finished_profiles_.Iterate(DeleteCpuProfile);
  current_profiles_.Iterate(DeleteCpuProfile);
  code_entries_.Iterate(DeleteCodeEntry);
}
520
521
522bool CpuProfilesCollection::StartProfiling(const char* title, unsigned uid,
523                                           bool record_samples) {
524  ASSERT(uid > 0);
525  current_profiles_semaphore_->Wait();
526  if (current_profiles_.length() >= kMaxSimultaneousProfiles) {
527    current_profiles_semaphore_->Signal();
528    return false;
529  }
530  for (int i = 0; i < current_profiles_.length(); ++i) {
531    if (strcmp(current_profiles_[i]->title(), title) == 0) {
532      // Ignore attempts to start profile with the same title.
533      current_profiles_semaphore_->Signal();
534      return false;
535    }
536  }
537  current_profiles_.Add(new CpuProfile(title, uid, record_samples));
538  current_profiles_semaphore_->Signal();
539  return true;
540}
541
542
543CpuProfile* CpuProfilesCollection::StopProfiling(const char* title) {
544  const int title_len = StrLength(title);
545  CpuProfile* profile = NULL;
546  current_profiles_semaphore_->Wait();
547  for (int i = current_profiles_.length() - 1; i >= 0; --i) {
548    if (title_len == 0 || strcmp(current_profiles_[i]->title(), title) == 0) {
549      profile = current_profiles_.Remove(i);
550      break;
551    }
552  }
553  current_profiles_semaphore_->Signal();
554
555  if (profile == NULL) return NULL;
556  profile->CalculateTotalTicksAndSamplingRate();
557  finished_profiles_.Add(profile);
558  return profile;
559}
560
561
562bool CpuProfilesCollection::IsLastProfile(const char* title) {
563  // Called from VM thread, and only it can mutate the list,
564  // so no locking is needed here.
565  if (current_profiles_.length() != 1) return false;
566  return StrLength(title) == 0
567      || strcmp(current_profiles_[0]->title(), title) == 0;
568}
569
570
571void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
572  // Called from VM thread for a completed profile.
573  unsigned uid = profile->uid();
574  for (int i = 0; i < finished_profiles_.length(); i++) {
575    if (uid == finished_profiles_[i]->uid()) {
576      finished_profiles_.Remove(i);
577      return;
578    }
579  }
580  UNREACHABLE();
581}
582
583
// Appends one symbolized sample path to every in-progress profile.
void CpuProfilesCollection::AddPathToCurrentProfiles(
    const Vector<CodeEntry*>& path) {
  // As starting / stopping profiles is rare relatively to this
  // method, we don't bother minimizing the duration of lock holding,
  // e.g. copying contents of the list to a local vector.
  current_profiles_semaphore_->Wait();
  for (int i = 0; i < current_profiles_.length(); ++i) {
    current_profiles_[i]->AddPath(path);
  }
  current_profiles_semaphore_->Signal();
}
595
596
597CodeEntry* CpuProfilesCollection::NewCodeEntry(
598      Logger::LogEventsAndTags tag,
599      const char* name,
600      const char* name_prefix,
601      const char* resource_name,
602      int line_number) {
603  CodeEntry* code_entry = new CodeEntry(tag,
604                                        name,
605                                        name_prefix,
606                                        resource_name,
607                                        line_number);
608  code_entries_.Add(code_entry);
609  return code_entry;
610}
611
612
// Display names of the synthetic entries the generator produces.
const char* const ProfileGenerator::kAnonymousFunctionName =
    "(anonymous function)";
const char* const ProfileGenerator::kProgramEntryName =
    "(program)";
const char* const ProfileGenerator::kIdleEntryName =
    "(idle)";
const char* const ProfileGenerator::kGarbageCollectorEntryName =
    "(garbage collector)";
const char* const ProfileGenerator::kUnresolvedFunctionName =
    "(unresolved function)";
623
624
// Pre-creates the synthetic code entries in the shared collection.
// unresolved_entry_ is used directly by RecordTickSample; the others
// are presumably returned by EntryForVMState (defined elsewhere) —
// verify against that implementation.
ProfileGenerator::ProfileGenerator(CpuProfilesCollection* profiles)
    : profiles_(profiles),
      program_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG, kProgramEntryName)),
      idle_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG, kIdleEntryName)),
      gc_entry_(
          profiles->NewCodeEntry(Logger::BUILTIN_TAG,
                                 kGarbageCollectorEntryName)),
      unresolved_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG,
                                 kUnresolvedFunctionName)) {
}
638
639
// Symbolizes one tick: maps the sampled pc and raw stack addresses to
// CodeEntry*-s via code_map_, then appends the resulting call path to
// every profile currently being recorded.
void ProfileGenerator::RecordTickSample(const TickSample& sample) {
  // Allocate space for stack frames + pc + function + vm-state.
  ScopedVector<CodeEntry*> entries(sample.frames_count + 3);
  // As actual number of decoded code entries may vary, initialize
  // entries vector with NULL values.
  CodeEntry** entry = entries.start();
  memset(entry, 0, entries.length() * sizeof(*entry));
  if (sample.pc != NULL) {
    if (sample.has_external_callback && sample.state == EXTERNAL &&
        sample.top_frame_type == StackFrame::EXIT) {
      // Don't use PC when in external callback code, as it can point
      // inside callback's code, and we will erroneously report
      // that a callback calls itself.
      *entry++ = code_map_.FindEntry(sample.external_callback);
    } else {
      Address start;
      CodeEntry* pc_entry = code_map_.FindEntry(sample.pc, &start);
      // If pc is in the function code before it set up stack frame or after the
      // frame was destroyed SafeStackFrameIterator incorrectly thinks that
      // ebp contains return address of the current function and skips caller's
      // frame. Check for this case and just skip such samples.
      if (pc_entry) {
        List<OffsetRange>* ranges = pc_entry->no_frame_ranges();
        if (ranges) {
          Code* code = Code::cast(HeapObject::FromAddress(start));
          int pc_offset = static_cast<int>(
              sample.pc - code->instruction_start());
          for (int i = 0; i < ranges->length(); i++) {
            OffsetRange& range = ranges->at(i);
            if (range.from <= pc_offset && pc_offset < range.to) {
              return;  // Inside a no-frame range: drop the whole sample.
            }
          }
        }
        *entry++ = pc_entry;

        if (pc_entry->builtin_id() == Builtins::kFunctionCall ||
            pc_entry->builtin_id() == Builtins::kFunctionApply) {
          // When current function is FunctionCall or FunctionApply builtin the
          // top frame is either frame of the calling JS function or internal
          // frame. In the latter case we know the caller for sure but in the
          // former case we don't so we simply replace the frame with
          // 'unresolved' entry.
          if (sample.top_frame_type == StackFrame::JAVA_SCRIPT) {
            *entry++ = unresolved_entry_;
          }
        }
      }
    }

    // Symbolize the raw stack addresses; unresolved frames stay NULL
    // and are later skipped by ProfileTree::AddPathFromEnd.
    for (const Address* stack_pos = sample.stack,
           *stack_end = stack_pos + sample.frames_count;
         stack_pos != stack_end;
         ++stack_pos) {
      *entry++ = code_map_.FindEntry(*stack_pos);
    }
  }

  if (FLAG_prof_browser_mode) {
    bool no_symbolized_entries = true;
    for (CodeEntry** e = entries.start(); e != entry; ++e) {
      if (*e != NULL) {
        no_symbolized_entries = false;
        break;
      }
    }
    // If no frames were symbolized, put the VM state entry in.
    if (no_symbolized_entries) {
      *entry++ = EntryForVMState(sample.state);
    }
  }

  profiles_->AddPathToCurrentProfiles(entries);
}
714
715
716} }  // namespace v8::internal
717