runtime-profiler.cc revision c8c1d9e03f4babd16833b0f8ccf6aab5fa6e8c7a
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/runtime-profiler.h"

#include "src/assembler.h"
#include "src/base/platform/platform.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/compiler.h"
#include "src/execution.h"
#include "src/frames-inl.h"
#include "src/full-codegen/full-codegen.h"
#include "src/global-handles.h"
#include "src/interpreter/interpreter.h"

namespace v8 {
namespace internal {


// Number of times a function has to be seen on the stack before it is
// compiled for baseline.
static const int kProfilerTicksBeforeBaseline = 0;
// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
// If the function optimization was disabled due to high deoptimization count,
// but the function is hot and has been seen on the stack this number of times,
// then we try to reenable optimization for this function.
static const int kProfilerTicksBeforeReenablingOptimization = 250;
// If a function does not have enough type info (according to
// FLAG_type_info_threshold), but has seen a huge number of ticks,
// optimize it as it is.
static const int kTicksWhenNotEnoughTypeInfo = 100;
// We only have one byte to store the number of ticks.
STATIC_ASSERT(kProfilerTicksBeforeOptimization < 256);
STATIC_ASSERT(kProfilerTicksBeforeReenablingOptimization < 256);
STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);

// Maximum size in bytes of generated code for a function to allow OSR.
static const int kOSRCodeSizeAllowanceBase =
    100 * FullCodeGenerator::kCodeSizeMultiplier;
static const int kOSRCodeSizeAllowanceBaseIgnition =
    10 * interpreter::Interpreter::kCodeSizeMultiplier;

static const int kOSRCodeSizeAllowancePerTick =
    4 * FullCodeGenerator::kCodeSizeMultiplier;
static const int kOSRCodeSizeAllowancePerTickIgnition =
    2 * interpreter::Interpreter::kCodeSizeMultiplier;

// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxSizeEarlyOpt =
    5 * FullCodeGenerator::kCodeSizeMultiplier;
static const int kMaxSizeEarlyOptIgnition =
    5 * interpreter::Interpreter::kCodeSizeMultiplier;

// Certain functions are simply too big to be worth optimizing.
// We aren't using the code size multiplier here because there is no
// "kMaxSizeOpt" with which we would need to normalize. This constant is
// only for optimization decisions coming into TurboFan from Ignition.
static const int kMaxSizeOptIgnition = 250 * 1024;

#define OPTIMIZATION_REASON_LIST(V)                            \
  V(DoNotOptimize, "do not optimize")                          \
  V(HotAndStable, "hot and stable")                            \
  V(HotEnoughForBaseline, "hot enough for baseline")           \
  V(HotWithoutMuchTypeInfo, "not much type info but very hot") \
  V(SmallFunction, "small function")

enum class OptimizationReason : uint8_t {
#define OPTIMIZATION_REASON_CONSTANTS(Constant, message) k##Constant,
  OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_CONSTANTS)
#undef OPTIMIZATION_REASON_CONSTANTS
};

char const* OptimizationReasonToString(OptimizationReason reason) {
  static char const* reasons[] = {
#define OPTIMIZATION_REASON_TEXTS(Constant, message) message,
      OPTIMIZATION_REASON_LIST(OPTIMIZATION_REASON_TEXTS)
#undef OPTIMIZATION_REASON_TEXTS
  };
  size_t const index = static_cast<size_t>(reason);
  DCHECK_LT(index, arraysize(reasons));
  return reasons[index];
}

std::ostream& operator<<(std::ostream& os, OptimizationReason reason) {
  return os << OptimizationReasonToString(reason);
}

RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
    : isolate_(isolate),
      any_ic_changed_(false) {
}

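// Gathers IC (inline cache) feedback for |function|: counts from the
// full-codegen type feedback info (when the function has FUNCTION code) are
// combined with counts harvested from the feedback vector, and percentages
// are derived for comparison against FLAG_type_info_threshold and
// FLAG_generic_ic_threshold.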
static void GetICCounts(JSFunction* function, int* ic_with_type_info_count,
                        int* ic_generic_count, int* ic_total_count,
                        int* type_info_percentage, int* generic_percentage) {
  *ic_total_count = 0;
  *ic_generic_count = 0;
  *ic_with_type_info_count = 0;
  if (function->code()->kind() == Code::FUNCTION) {
    Code* shared_code = function->shared()->code();
    Object* raw_info = shared_code->type_feedback_info();
    if (raw_info->IsTypeFeedbackInfo()) {
      TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
      *ic_with_type_info_count = info->ic_with_type_info_count();
      *ic_generic_count = info->ic_generic_count();
      *ic_total_count = info->ic_total_count();
    }
  }

  // Harvest vector-ics as well
  TypeFeedbackVector* vector = function->feedback_vector();
  int with = 0, gen = 0, type_vector_ic_count = 0;
  const bool is_interpreted = function->shared()->IsInterpreted();

  vector->ComputeCounts(&with, &gen, &type_vector_ic_count, is_interpreted);
  *ic_total_count += type_vector_ic_count;
  *ic_with_type_info_count += with;
  *ic_generic_count += gen;

  if (*ic_total_count > 0) {
    *type_info_percentage = 100 * *ic_with_type_info_count / *ic_total_count;
    *generic_percentage = 100 * *ic_generic_count / *ic_total_count;
  } else {
    *type_info_percentage = 100;  // Compared against lower bound.
    *generic_percentage = 0;      // Compared against upper bound.
  }
}

static void TraceRecompile(JSFunction* function, const char* reason,
                           const char* type) {
  if (FLAG_trace_opt &&
      function->shared()->PassesFilter(FLAG_hydrogen_filter)) {
    PrintF("[marking ");
    function->ShortPrint();
    PrintF(" for %s recompilation, reason: %s", type, reason);
    if (FLAG_type_info_threshold > 0) {
      int typeinfo, generic, total, type_percentage, generic_percentage;
      GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                  &generic_percentage);
      PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total,
             type_percentage);
      PrintF(", generic ICs: %d/%d (%d%%)", generic, total, generic_percentage);
    }
    PrintF("]\n");
  }
}

void RuntimeProfiler::Optimize(JSFunction* function,
                               OptimizationReason reason) {
  DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
  TraceRecompile(function, OptimizationReasonToString(reason), "optimized");
  function->AttemptConcurrentOptimization();
}

void RuntimeProfiler::Baseline(JSFunction* function,
                               OptimizationReason reason) {
  DCHECK_NE(reason, OptimizationReason::kDoNotOptimize);
  TraceRecompile(function, OptimizationReasonToString(reason), "baseline");
  DCHECK(function->shared()->IsInterpreted());
  function->MarkForBaseline();
}

void RuntimeProfiler::AttemptOnStackReplacement(JavaScriptFrame* frame,
                                                int loop_nesting_levels) {
  JSFunction* function = frame->function();
  SharedFunctionInfo* shared = function->shared();
  if (!FLAG_use_osr || function->shared()->IsBuiltin()) {
    return;
  }

  // If the code is not optimizable, don't try OSR.
  if (shared->optimization_disabled()) return;

  // We are not prepared to do OSR for a function that already has an
  // allocated arguments object.  The optimized code would bypass it for
  // arguments accesses, which is unsound.  Don't try OSR.
  if (shared->uses_arguments()) return;

  // We're using on-stack replacement: modify unoptimized code so that
  // certain back edges in any unoptimized frame will trigger on-stack
  // replacement for that frame.
  //  - Ignition: Store new loop nesting level in BytecodeArray header.
  //  - FullCodegen: Patch back edges up to new level using BackEdgeTable.
  if (FLAG_trace_osr) {
    PrintF("[OSR - arming back edges in ");
    function->PrintName();
    PrintF("]\n");
  }

  if (frame->type() == StackFrame::JAVA_SCRIPT) {
    DCHECK(shared->HasBaselineCode());
    DCHECK(BackEdgeTable::Verify(shared->GetIsolate(), shared->code()));
    for (int i = 0; i < loop_nesting_levels; i++) {
      BackEdgeTable::Patch(isolate_, shared->code());
    }
  } else if (frame->type() == StackFrame::INTERPRETED) {
    DCHECK(shared->HasBytecodeArray());
    if (!FLAG_ignition_osr) return;  // Only use this when enabled.
    int level = shared->bytecode_array()->osr_loop_nesting_level();
    shared->bytecode_array()->set_osr_loop_nesting_level(
        Min(level + loop_nesting_levels, AbstractCode::kMaxLoopNestingMarker));
  } else {
    UNREACHABLE();
  }
}

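// Tiering decision for a function running on full-codegen code: consider OSR
// first (always under --always-osr, or when the function is already marked
// for optimization but the frame still runs unoptimized code), skip top-level
// code that is not on top of the stack or is too large, skip functions with
// optimization disabled, then mark the function for optimization based on
// profiler ticks, IC feedback and code size.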
void RuntimeProfiler::MaybeOptimizeFullCodegen(JSFunction* function,
                                               JavaScriptFrame* frame,
                                               int frame_count) {
  SharedFunctionInfo* shared = function->shared();
  Code* shared_code = shared->code();
  if (shared_code->kind() != Code::FUNCTION) return;
  if (function->IsInOptimizationQueue()) return;

  if (FLAG_always_osr) {
    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
    // Fall through and do a normal optimized compile as well.
  } else if (!frame->is_optimized() &&
             (function->IsMarkedForOptimization() ||
              function->IsMarkedForConcurrentOptimization() ||
              function->IsOptimized())) {
    // Attempt OSR if we are still running unoptimized code even though
    // the function has long been marked or even already been optimized.
    int ticks = shared_code->profiler_ticks();
    int64_t allowance =
        kOSRCodeSizeAllowanceBase +
        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTick;
    if (shared_code->CodeSize() > allowance &&
        ticks < Code::ProfilerTicksField::kMax) {
      shared_code->set_profiler_ticks(ticks + 1);
    } else {
      AttemptOnStackReplacement(frame);
    }
    return;
  }

  // Only record top-level code on top of the execution stack and
  // avoid optimizing excessively large scripts since top-level code
  // will be executed only once.
  const int kMaxToplevelSourceSize = 10 * 1024;
  if (shared->is_toplevel() &&
      (frame_count > 1 || shared->SourceSize() > kMaxToplevelSourceSize)) {
    return;
  }

  // Do not record non-optimizable functions.
  if (shared->optimization_disabled()) {
    if (shared->deopt_count() >= FLAG_max_opt_count) {
      // If optimization was disabled due to many deoptimizations,
      // then check if the function is hot and try to reenable optimization.
      int ticks = shared_code->profiler_ticks();
      if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
        shared_code->set_profiler_ticks(0);
        shared->TryReenableOptimization();
      } else {
        shared_code->set_profiler_ticks(ticks + 1);
      }
    }
    return;
  }
  if (frame->is_optimized()) return;

  int ticks = shared_code->profiler_ticks();

  if (ticks >= kProfilerTicksBeforeOptimization) {
    int typeinfo, generic, total, type_percentage, generic_percentage;
    GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                &generic_percentage);
    if (type_percentage >= FLAG_type_info_threshold &&
        generic_percentage <= FLAG_generic_ic_threshold) {
      // If this particular function hasn't had any ICs patched for enough
      // ticks, optimize it now.
      Optimize(function, OptimizationReason::kHotAndStable);
    } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
      Optimize(function, OptimizationReason::kHotWithoutMuchTypeInfo);
    } else {
      shared_code->set_profiler_ticks(ticks + 1);
      if (FLAG_trace_opt_verbose) {
        PrintF("[not yet optimizing ");
        function->PrintName();
        PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
               type_percentage);
      }
    }
  } else if (!any_ic_changed_ &&
             shared_code->instruction_size() < kMaxSizeEarlyOpt) {
    // If no IC was patched since the last tick and this function is very
    // small, optimistically optimize it now.
    int typeinfo, generic, total, type_percentage, generic_percentage;
    GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                &generic_percentage);
    if (type_percentage >= FLAG_type_info_threshold &&
        generic_percentage <= FLAG_generic_ic_threshold) {
      Optimize(function, OptimizationReason::kSmallFunction);
    } else {
      shared_code->set_profiler_ticks(ticks + 1);
    }
  } else {
    shared_code->set_profiler_ticks(ticks + 1);
  }
}

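// Tiering decision for a function running on Ignition whose next tier is
// baseline (full-codegen) code: give OSR a chance first via MaybeOSRIgnition,
// then mark the function for baseline compilation once it is hot enough.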
void RuntimeProfiler::MaybeBaselineIgnition(JSFunction* function,
                                            JavaScriptFrame* frame) {
  if (function->IsInOptimizationQueue()) return;

  if (FLAG_always_osr) {
    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
    // Fall through and do a normal baseline compile as well.
  } else if (MaybeOSRIgnition(function, frame)) {
    return;
  }

  SharedFunctionInfo* shared = function->shared();
  int ticks = shared->profiler_ticks();

  if (shared->optimization_disabled() &&
      shared->disable_optimization_reason() == kOptimizationDisabledForTest) {
    // Don't baseline functions which have been marked by NeverOptimizeFunction
    // in a test.
    return;
  }

  if (ticks >= kProfilerTicksBeforeBaseline) {
    Baseline(function, OptimizationReason::kHotEnoughForBaseline);
  }
}

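// Tiering decision for a function running on Ignition whose next tier is
// optimized code: give OSR a chance first via MaybeOSRIgnition, handle
// functions whose optimization is disabled, then ask ShouldOptimizeIgnition
// for a reason to optimize.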
void RuntimeProfiler::MaybeOptimizeIgnition(JSFunction* function,
                                            JavaScriptFrame* frame) {
  if (function->IsInOptimizationQueue()) return;

  if (FLAG_always_osr) {
    AttemptOnStackReplacement(frame, AbstractCode::kMaxLoopNestingMarker);
    // Fall through and do a normal optimized compile as well.
  } else if (MaybeOSRIgnition(function, frame)) {
    return;
  }

  SharedFunctionInfo* shared = function->shared();
  int ticks = shared->profiler_ticks();

  if (shared->optimization_disabled()) {
    if (shared->deopt_count() >= FLAG_max_opt_count) {
      // If optimization was disabled due to many deoptimizations,
      // then check if the function is hot and try to reenable optimization.
      if (ticks >= kProfilerTicksBeforeReenablingOptimization) {
        shared->set_profiler_ticks(0);
        shared->TryReenableOptimization();
      }
    }
    return;
  }

  if (frame->is_optimized()) return;

  OptimizationReason reason = ShouldOptimizeIgnition(function, frame);

  if (reason != OptimizationReason::kDoNotOptimize) {
    Optimize(function, reason);
  }
}

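// Considers OSR for an interpreted frame. Returns true when the function is
// already marked for optimization, already optimized, or marked for baseline
// while also qualifying for optimization; in that case OSR may have been
// attempted and the caller skips its normal tiering decision for this tick.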
bool RuntimeProfiler::MaybeOSRIgnition(JSFunction* function,
                                       JavaScriptFrame* frame) {
  SharedFunctionInfo* shared = function->shared();
  int ticks = shared->profiler_ticks();

  // TODO(rmcilroy): Also ensure we only OSR top-level code if it is smaller
  // than kMaxToplevelSourceSize.

  bool osr_before_baselined = function->IsMarkedForBaseline() &&
                              ShouldOptimizeIgnition(function, frame) !=
                                  OptimizationReason::kDoNotOptimize;
  if (!frame->is_optimized() &&
      (osr_before_baselined || function->IsMarkedForOptimization() ||
       function->IsMarkedForConcurrentOptimization() ||
       function->IsOptimized())) {
    // Attempt OSR if we are still running interpreted code even though
    // the function has long been marked or even already been optimized.
    int64_t allowance =
        kOSRCodeSizeAllowanceBaseIgnition +
        static_cast<int64_t>(ticks) * kOSRCodeSizeAllowancePerTickIgnition;
    if (shared->bytecode_array()->Size() <= allowance) {
      AttemptOnStackReplacement(frame);
    }
    return true;
  }
  return false;
}

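// Decision helper for Ignition tier-up: returns the reason for optimizing
// |function| now, or kDoNotOptimize if its bytecode is too large, it is not
// hot enough, or it lacks sufficient type feedback.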
OptimizationReason RuntimeProfiler::ShouldOptimizeIgnition(
    JSFunction* function, JavaScriptFrame* frame) {
  SharedFunctionInfo* shared = function->shared();
  int ticks = shared->profiler_ticks();

  if (shared->bytecode_array()->Size() > kMaxSizeOptIgnition) {
    return OptimizationReason::kDoNotOptimize;
  }

  if (ticks >= kProfilerTicksBeforeOptimization) {
    int typeinfo, generic, total, type_percentage, generic_percentage;
    GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                &generic_percentage);
    if (type_percentage >= FLAG_type_info_threshold &&
        generic_percentage <= FLAG_generic_ic_threshold) {
      // If this particular function hasn't had any ICs patched for enough
      // ticks, optimize it now.
      return OptimizationReason::kHotAndStable;
    } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
      return OptimizationReason::kHotWithoutMuchTypeInfo;
    } else {
      if (FLAG_trace_opt_verbose) {
        PrintF("[not yet optimizing ");
        function->PrintName();
        PrintF(", not enough type info: %d/%d (%d%%)]\n", typeinfo, total,
               type_percentage);
      }
      return OptimizationReason::kDoNotOptimize;
    }
  } else if (!any_ic_changed_ &&
             shared->bytecode_array()->Size() < kMaxSizeEarlyOptIgnition) {
    // If no IC was patched since the last tick and this function is very
    // small, optimistically optimize it now.
    int typeinfo, generic, total, type_percentage, generic_percentage;
    GetICCounts(function, &typeinfo, &generic, &total, &type_percentage,
                &generic_percentage);
    if (type_percentage >= FLAG_type_info_threshold &&
        generic_percentage <= FLAG_generic_ic_threshold) {
      return OptimizationReason::kSmallFunction;
    }
  }
  return OptimizationReason::kDoNotOptimize;
}

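// Walks up to FLAG_frame_count JavaScript frames on the current stack, routes
// each function to the full-codegen or Ignition tiering decision depending on
// how it currently executes, bumps per-SharedFunctionInfo profiler ticks, and
// resets any_ic_changed_ for the next sample.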
void RuntimeProfiler::MarkCandidatesForOptimization() {
  HandleScope scope(isolate_);

  if (!isolate_->use_crankshaft()) return;

  DisallowHeapAllocation no_gc;

  // Run through the JavaScript frames on the stack. If we already have a
  // sample of a function, mark it for optimization (eagerly or lazily).
  int frame_count = 0;
  int frame_count_limit = FLAG_frame_count;
  for (JavaScriptFrameIterator it(isolate_);
       frame_count++ < frame_count_limit && !it.done();
       it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    JSFunction* function = frame->function();

    Compiler::CompilationTier next_tier =
        Compiler::NextCompilationTier(function);
    if (function->shared()->IsInterpreted()) {
      if (next_tier == Compiler::BASELINE) {
        MaybeBaselineIgnition(function, frame);
      } else {
        DCHECK_EQ(next_tier, Compiler::OPTIMIZED);
        MaybeOptimizeIgnition(function, frame);
      }
    } else {
      DCHECK_EQ(next_tier, Compiler::OPTIMIZED);
      MaybeOptimizeFullCodegen(function, frame, frame_count);
    }

    // Update shared function info ticks after checking whether functions
    // should be optimized, to keep FCG (which updates ticks on code) and
    // Ignition (which updates ticks on shared function info) in sync.
    List<JSFunction*> functions(4);
    frame->GetFunctions(&functions);
    for (int i = functions.length(); --i >= 0;) {
      SharedFunctionInfo* shared_function_info = functions[i]->shared();
      int ticks = shared_function_info->profiler_ticks();
      if (ticks < Smi::kMaxValue) {
        shared_function_info->set_profiler_ticks(ticks + 1);
      }
    }
  }
  any_ic_changed_ = false;
}

}  // namespace internal
}  // namespace v8