// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/runtime-profiler.h"

#include "src/assembler.h"
#include "src/ast/scopeinfo.h"
#include "src/base/platform/platform.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/execution.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/global-handles.h"

namespace v8 {
namespace internal {


// Number of times a function has to be seen on the stack before it is
// compiled for baseline.
static const int kProfilerTicksBeforeBaseline = 1;
// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
// If the function optimization was disabled due to high deoptimization count,
// but the function is hot and has been seen on the stack this number of times,
// then we try to reenable optimization for this function.
static const int kProfilerTicksBeforeReenablingOptimization = 250;
// If a function does not have enough type info (according to
// FLAG_type_info_threshold), but has seen a huge number of ticks,
// optimize it as it is.
static const int kTicksWhenNotEnoughTypeInfo = 100;
// We only have one byte to store the number of ticks.
STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256);
STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);
STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);

// Maximum size in bytes of generated code for a function to allow OSR.
static const int kOSRCodeSizeAllowanceBase =
    100 * FullCodeGenerator::kCodeSizeMultiplier;

static const int kOSRCodeSizeAllowancePerTick =
    4 * FullCodeGenerator::kCodeSizeMultiplier;

// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxSizeEarlyOpt =
    5 * FullCodeGenerator::kCodeSizeMultiplier;


RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
    : isolate_(isolate),
      any_ic_changed_(false) {
}

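// Collects IC counts for |function|: the counts recorded in the shared code's
// TypeFeedbackInfo plus those harvested from the feedback vector, and derives
// the type-info and generic percentages used by the heuristics below.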
static void GetICCounts(JSFunction* function, int* ic_with_type_info_count,
                        int* ic_generic_count, int* ic_total_count,
                        int* type_info_percentage, int* generic_percentage) {
  *ic_total_count = 0;
  *ic_generic_count = 0;
  *ic_with_type_info_count = 0;
  if (function->code()->kind() == Code::FUNCTION) {
    Code* shared_code = function->shared()->code();
    Object* raw_info = shared_code->type_feedback_info();
    if (raw_info->IsTypeFeedbackInfo()) {
      TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
      *ic_with_type_info_count = info->ic_with_type_info_count();
      *ic_generic_count = info->ic_generic_count();
      *ic_total_count = info->ic_total_count();
    }
  }

  // Harvest vector-ics as well
  TypeFeedbackVector* vector = function->feedback_vector();
  int with = 0, gen = 0;
  vector->ComputeCounts(&with, &gen);
  *ic_with_type_info_count += with;
  *ic_generic_count += gen;

  if (*ic_total_count > 0) {
    *type_info_percentage = 100 * *ic_with_type_info_count / *ic_total_count;
    *generic_percentage = 100 * *ic_generic_count / *ic_total_count;
  } else {
    *type_info_percentage = 100;  // Compared against lower bound.
    *generic_percentage = 0;      // Compared against upper bound.
  }
}

static void TraceRecompile(JSFunction* function, const char* reason,
                           const char* type) {
  if (FLAG_trace_opt &&
      function->shared()->PassesFilter(FLAG_hydrogen_filter)) {
    PrintF("[marking ");
    function->ShortPrint();
    PrintF(" for %s recompilation, reason: %s", type, reason);
    if (FLAG_type_info_threshold > 0) {
      int typeinfo, generic, total, type_percentage, generic_percentage;
      GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                  &generic_percentage);
      PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total,
             type_percentage);
      PrintF(", generic ICs: %d/%d (%d%%)", generic, total, generic_percentage);
    }
    PrintF("]\n");
  }
}

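// Marks |function| for concurrent optimizing recompilation and traces the
// decision when --trace-opt is enabled.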
void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
  TraceRecompile(function, reason, "optimized");

  // TODO(4280): Fix this to check function is compiled to baseline once we
  // have a standard way to check that. For now, only functions compiled for
  // the interpreter have a bytecode array, so baseline code does not.
  DCHECK(!function->shared()->HasBytecodeArray());
  function->AttemptConcurrentOptimization();
}

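// Marks an interpreted |function| for recompilation with the baseline
// compiler and traces the decision when --trace-opt is enabled.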
void RuntimeProfiler::Baseline(JSFunction* function, const char* reason) {
  TraceRecompile(function, reason, "baseline");

  // TODO(4280): Fix this to check function is compiled for the interpreter
  // once we have a standard way to check that. For now function will only
  // have a bytecode array if compiled for the interpreter.
  DCHECK(function->shared()->HasBytecodeArray());
  function->MarkForBaseline();
}

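// Arms the back edges of |function|'s unoptimized code so that loops trigger
// on-stack replacement, unless OSR is disabled, unsafe for this function, or
// the function is a builtin.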
void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function,
                                                int loop_nesting_levels) {
  SharedFunctionInfo* shared = function->shared();
  if (!FLAG_use_osr || function->shared()->IsBuiltin()) {
    return;
  }

  // If the code is not optimizable, don't try OSR.
  if (shared->optimization_disabled()) return;

  // We are not prepared to do OSR for a function that already has an
  // allocated arguments object.  The optimized code would bypass it for
  // arguments accesses, which is unsound.  Don't try OSR.
  if (shared->uses_arguments()) return;

  // We're using on-stack replacement: patch the unoptimized code so that
  // any back edge in any unoptimized frame will trigger on-stack
  // replacement for that frame.
  if (FLAG_trace_osr) {
    PrintF("[OSR - patching back edges in ");
    function->PrintName();
    PrintF("]\n");
  }

  for (int i = 0; i < loop_nesting_levels; i++) {
    BackEdgeTable::Patch(isolate_, shared->code());
  }
}

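// Decides whether a full-codegen compiled |function| should be optimized,
// OSR'd, or merely have its tick count bumped, based on profiler ticks, code
// size, and IC feedback.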
void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
                                               int frame_count,
                                               bool frame_optimized) {
  SharedFunctionInfo* shared = function->shared();
  Code* shared_code = shared->code();
  if (shared_code->kind() != Code::FUNCTION) return;
  if (function->IsInOptimizationQueue()) return;

  if (FLAG_always_osr) {
    AttemptOnStackReplacement(function, Code::kMaxLoopNestingMarker);
    // Fall through and do a normal optimized compile as well.
  } else if (!frame_optimized &&
             (function->IsMarkedForOptimization() ||
              function->IsMarkedForConcurrentOptimization() ||
              function->IsOptimized())) {
    // Attempt OSR if we are still running unoptimized code even though the
    // function has long been marked or even already been optimized.
    int ticks = shared_code->profiler_ticks();
    int64_t allowance =
        kOSRCodeSizeAllowanceBase +
        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTick;
    if (shared_code->CodeSize() > allowance &&
        ticks < Code::ProfilerTicksField::kMax) {
      shared_code->set_profiler_ticks(ticks + 1);
    } else {
      AttemptOnStackReplacement(function);
    }
    return;
  }

  // Only record top-level code on top of the execution stack and
  // avoid optimizing excessively large scripts since top-level code
  // will be executed only once.
  const int kMaxToplevelSourceSize = 10 * 1024;
  if (shared->is_toplevel() &&
      (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
    return;
  }

  // Do not record non-optimizable functions.
  if (shared->optimization_disabled()) {
    if (shared->deopt_count() >= FLAG_max_opt_count) {
      // If optimization was disabled due to many deoptimizations,
      // then check if the function is hot and try to reenable optimization.
      int ticks = shared_code->profiler_ticks();
      if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
        shared_code->set_profiler_ticks(0);
        shared->TryReenableOptimization();
      } else {
        shared_code->set_profiler_ticks(ticks + 1);
      }
    }
    return;
  }
  if (function->IsOptimized()) return;

  int ticks = shared_code->profiler_ticks();

  if (ticks >= kProfilerTicksBeforeOptimization) {
    int typeinfo, generic, total, type_percentage, generic_percentage;
    GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                &generic_percentage);
    if (type_percentage >= FLAG_type_info_threshold &&
        generic_percentage <= FLAG_generic_ic_threshold) {
      // If this particular function hasn't had any ICs patched for enough
      // ticks, optimize it now.
      Optimize(function, "hot and stable");
    } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
      Optimize(function, "not much type info but very hot");
    } else {
      shared_code->set_profiler_ticks(ticks + 1);
      if (FLAG_trace_opt_verbose) {
        PrintF("[not yet optimizing ");
        function->PrintName();
        PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
               type_percentage);
      }
    }
  } else if (!any_ic_changed_ &&
             shared_code->instruction_size() < kMaxSizeEarlyOpt) {
    // If no IC was patched since the last tick and this function is very
    // small, optimistically optimize it now.
    int typeinfo, generic, total, type_percentage, generic_percentage;
    GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                &generic_percentage);
    if (type_percentage >= FLAG_type_info_threshold &&
        generic_percentage <= FLAG_generic_ic_threshold) {
      Optimize(function, "small function");
    } else {
      shared_code->set_profiler_ticks(ticks + 1);
    }
  } else {
    shared_code->set_profiler_ticks(ticks + 1);
  }
}

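// Decides whether an interpreted (Ignition) |function| is hot enough to be
// marked for baseline compilation.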
void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function) {
  if (function->IsInOptimizationQueue()) return;

  SharedFunctionInfo* shared = function->shared();
  int ticks = shared->profiler_ticks();

  // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
  // than kMaxToplevelSourceSize.
  // TODO(rmcilroy): Consider whether we should optimize small functions when
  // they are first seen on the stack (e.g., kMaxSizeEarlyOpt).

  if (function->IsMarkedForBaseline() || function->IsMarkedForOptimization() ||
      function->IsMarkedForConcurrentOptimization() ||
      function->IsOptimized()) {
    // TODO(rmcilroy): Support OSR in these cases.
    return;
  }

  if (shared->optimization_disabled() &&
      shared->disable_optimization_reason() == kOptimizationDisabledForTest) {
    // Don't baseline functions which have been marked by NeverOptimizeFunction
    // in a test.
    return;
  }

  if (ticks >= kProfilerTicksBeforeBaseline) {
    Baseline(function, "hot enough for baseline");
  }
}

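// Walks the top JavaScript frames, bumps the profiler tick count of each
// function found there, and dispatches to the full-codegen or Ignition
// heuristics to mark hot candidates for recompilation.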
void RuntimeProfiler::MarkCandidatesForOptimization() {
  HandleScope scope(isolate_);

  if (!isolate_->use_crankshaft()) return;

  DisallowHeapAllocation no_gc;

  // Run through the JavaScript frames and collect them. If we already
  // have a sample of the function, we mark it for optimizations
  // (eagerly or lazily).
  int frame_count = 0;
  int frame_count_limit = FLAG_frame_count;
  for (JavaScriptFrameIterator it(isolate_);
       frame_count++ < frame_count_limit && !it.done();
       it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    JSFunction* function = frame->function();

    List<JSFunction*> functions(4);
    frame->GetFunctions(&functions);
    for (int i = functions.length(); --i >= 0; ) {
      SharedFunctionInfo* shared_function_info = functions[i]->shared();
      int ticks = shared_function_info->profiler_ticks();
      if (ticks < Smi::kMaxValue) {
        shared_function_info->set_profiler_ticks(ticks + 1);
      }
    }

    if (frame->is_interpreted()) {
      DCHECK(!frame->is_optimized());
      MaybeOptimizeIgnition(function);
    } else {
      MaybeOptimizeFullCodegen(function, frame_count, frame->is_optimized());
    }
  }
  any_ic_changed_ = false;
}


}  // namespace internal
}  // namespace v8