compiler.cc revision f91f0611dbaf29ca0f1d4aecb357ce243a19d2fa
1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler.h"
6
7#include <algorithm>
8#include <memory>
9
10#include "src/asmjs/asm-js.h"
11#include "src/asmjs/asm-typer.h"
12#include "src/ast/ast-numbering.h"
13#include "src/ast/prettyprinter.h"
14#include "src/ast/scopes.h"
15#include "src/bootstrapper.h"
16#include "src/codegen.h"
17#include "src/compilation-cache.h"
18#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
19#include "src/compiler/pipeline.h"
20#include "src/crankshaft/hydrogen.h"
21#include "src/debug/debug.h"
22#include "src/debug/liveedit.h"
23#include "src/deoptimizer.h"
24#include "src/frames-inl.h"
25#include "src/full-codegen/full-codegen.h"
26#include "src/globals.h"
27#include "src/heap/heap.h"
28#include "src/interpreter/interpreter.h"
29#include "src/isolate-inl.h"
30#include "src/log-inl.h"
31#include "src/messages.h"
32#include "src/parsing/parser.h"
33#include "src/parsing/rewriter.h"
34#include "src/parsing/scanner-character-streams.h"
35#include "src/runtime-profiler.h"
36#include "src/snapshot/code-serializer.h"
37#include "src/vm-state-inl.h"
38
39namespace v8 {
40namespace internal {
41
42
43#define PARSE_INFO_GETTER(type, name)  \
44  type CompilationInfo::name() const { \
45    CHECK(parse_info());               \
46    return parse_info()->name();       \
47  }
48
49
50#define PARSE_INFO_GETTER_WITH_DEFAULT(type, name, def) \
51  type CompilationInfo::name() const {                  \
52    return parse_info() ? parse_info()->name() : def;   \
53  }
54
55
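// For illustration, the first instantiation below,
// PARSE_INFO_GETTER(Handle<Script>, script), expands roughly to:
//
//   Handle<Script> CompilationInfo::script() const {
//     CHECK(parse_info());
//     return parse_info()->script();
//   }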
56PARSE_INFO_GETTER(Handle<Script>, script)
57PARSE_INFO_GETTER(FunctionLiteral*, literal)
58PARSE_INFO_GETTER_WITH_DEFAULT(DeclarationScope*, scope, nullptr)
59PARSE_INFO_GETTER_WITH_DEFAULT(Handle<Context>, context,
60                               Handle<Context>::null())
61PARSE_INFO_GETTER(Handle<SharedFunctionInfo>, shared_info)
62
63#undef PARSE_INFO_GETTER
64#undef PARSE_INFO_GETTER_WITH_DEFAULT
65
66// A wrapper around a CompilationInfo that detaches the Handles from
67// the underlying DeferredHandleScope and stores them in info_ on
68// destruction.
69class CompilationHandleScope BASE_EMBEDDED {
70 public:
71  explicit CompilationHandleScope(CompilationInfo* info)
72      : deferred_(info->isolate()), info_(info) {}
73  ~CompilationHandleScope() { info_->set_deferred_handles(deferred_.Detach()); }
74
75 private:
76  DeferredHandleScope deferred_;
77  CompilationInfo* info_;
78};
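// Used by GetOptimizedCodeLater() below, where handles created while
// preparing a concurrent compilation job must outlive the current HandleScope
// and be handed off to the background thread together with the job.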
79
80// Helper that measures the time spent in a scope and adds it to *location_.
81struct ScopedTimer {
82  explicit ScopedTimer(base::TimeDelta* location) : location_(location) {
83    DCHECK_NOT_NULL(location_);
84    timer_.Start();
85  }
86
87  ~ScopedTimer() { *location_ += timer_.Elapsed(); }
88
89  base::ElapsedTimer timer_;
90  base::TimeDelta* location_;
91};
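// Typical usage, as in the CompilationJob phases below:
//
//   ScopedTimer t(&time_taken_to_prepare_);
//   // ... timed work ...
//   // The destructor adds the elapsed time to time_taken_to_prepare_.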
92
93// ----------------------------------------------------------------------------
94// Implementation of CompilationInfo
95
96bool CompilationInfo::has_shared_info() const {
97  return parse_info_ && !parse_info_->shared_info().is_null();
98}
99
100CompilationInfo::CompilationInfo(ParseInfo* parse_info,
101                                 Handle<JSFunction> closure)
102    : CompilationInfo(parse_info, {}, Code::ComputeFlags(Code::FUNCTION), BASE,
103                      parse_info->isolate(), parse_info->zone()) {
104  closure_ = closure;
105
106  // Compiling for the snapshot typically results in different code than
107  // compiling later on. This means that code recompiled with deoptimization
108  // support won't be "equivalent" (as defined by SharedFunctionInfo::
109  // EnableDeoptimizationSupport), so it will replace the old code and all
110  // its type feedback. To avoid this, always compile functions in the snapshot
111  // with deoptimization support.
112  if (isolate_->serializer_enabled()) EnableDeoptimizationSupport();
113
114  if (FLAG_function_context_specialization) MarkAsFunctionContextSpecializing();
115  if (FLAG_turbo_inlining) MarkAsInliningEnabled();
116  if (FLAG_turbo_source_positions) MarkAsSourcePositionsEnabled();
117  if (FLAG_turbo_splitting) MarkAsSplittingEnabled();
118}
119
120CompilationInfo::CompilationInfo(Vector<const char> debug_name,
121                                 Isolate* isolate, Zone* zone,
122                                 Code::Flags code_flags)
123    : CompilationInfo(nullptr, debug_name, code_flags, STUB, isolate, zone) {}
124
125CompilationInfo::CompilationInfo(ParseInfo* parse_info,
126                                 Vector<const char> debug_name,
127                                 Code::Flags code_flags, Mode mode,
128                                 Isolate* isolate, Zone* zone)
129    : parse_info_(parse_info),
130      isolate_(isolate),
131      flags_(0),
132      code_flags_(code_flags),
133      mode_(mode),
134      osr_ast_id_(BailoutId::None()),
135      zone_(zone),
136      deferred_handles_(nullptr),
137      dependencies_(isolate, zone),
138      bailout_reason_(kNoReason),
139      prologue_offset_(Code::kPrologueOffsetNotSet),
140      track_positions_(FLAG_hydrogen_track_positions ||
141                       isolate->is_profiling()),
142      parameter_count_(0),
143      optimization_id_(-1),
144      osr_expr_stack_height_(0),
145      debug_name_(debug_name) {}
146
147CompilationInfo::~CompilationInfo() {
148  if (GetFlag(kDisableFutureOptimization) && has_shared_info()) {
149    shared_info()->DisableOptimization(bailout_reason());
150  }
151  dependencies()->Rollback();
152  delete deferred_handles_;
153}
154
155
156int CompilationInfo::num_parameters() const {
157  return !IsStub() ? scope()->num_parameters() : parameter_count_;
158}
159
160
161int CompilationInfo::num_parameters_including_this() const {
162  return num_parameters() + (is_this_defined() ? 1 : 0);
163}
164
165
166bool CompilationInfo::is_this_defined() const { return !IsStub(); }
167
168
169// Primitive functions are unlikely to be picked up by the stack-walking
170// profiler, so they trigger their own optimization when they're called
171// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
172bool CompilationInfo::ShouldSelfOptimize() {
173  return FLAG_crankshaft &&
174         !(literal()->flags() & AstProperties::kDontSelfOptimize) &&
175         !literal()->dont_optimize() &&
176         literal()->scope()->AllowsLazyCompilation() &&
177         !shared_info()->optimization_disabled();
178}
179
180
181bool CompilationInfo::has_simple_parameters() {
182  return scope()->has_simple_parameters();
183}
184
185std::unique_ptr<char[]> CompilationInfo::GetDebugName() const {
186  if (parse_info() && parse_info()->literal()) {
187    AllowHandleDereference allow_deref;
188    return parse_info()->literal()->debug_name()->ToCString();
189  }
190  if (parse_info() && !parse_info()->shared_info().is_null()) {
191    return parse_info()->shared_info()->DebugName()->ToCString();
192  }
193  Vector<const char> name_vec = debug_name_;
194  if (name_vec.is_empty()) name_vec = ArrayVector("unknown");
195  std::unique_ptr<char[]> name(new char[name_vec.length() + 1]);
196  memcpy(name.get(), name_vec.start(), name_vec.length());
197  name[name_vec.length()] = '\0';
198  return name;
199}
200
201StackFrame::Type CompilationInfo::GetOutputStackFrameType() const {
202  switch (output_code_kind()) {
203    case Code::STUB:
204    case Code::BYTECODE_HANDLER:
205    case Code::HANDLER:
206    case Code::BUILTIN:
207#define CASE_KIND(kind) case Code::kind:
208      IC_KIND_LIST(CASE_KIND)
209#undef CASE_KIND
210      return StackFrame::STUB;
211    case Code::WASM_FUNCTION:
212      return StackFrame::WASM;
213    case Code::JS_TO_WASM_FUNCTION:
214      return StackFrame::JS_TO_WASM;
215    case Code::WASM_TO_JS_FUNCTION:
216      return StackFrame::WASM_TO_JS;
217    default:
218      UNIMPLEMENTED();
219      return StackFrame::NONE;
220  }
221}
222
223int CompilationInfo::GetDeclareGlobalsFlags() const {
224  DCHECK(DeclareGlobalsLanguageMode::is_valid(parse_info()->language_mode()));
225  return DeclareGlobalsEvalFlag::encode(parse_info()->is_eval()) |
226         DeclareGlobalsNativeFlag::encode(parse_info()->is_native()) |
227         DeclareGlobalsLanguageMode::encode(parse_info()->language_mode());
228}
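// Note: the encoded value is handed to the runtime, which is expected to
// decode it with the same DeclareGlobals* BitField helpers; the exact bit
// layout lives with those BitField declarations, not here.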
229
230SourcePositionTableBuilder::RecordingMode
231CompilationInfo::SourcePositionRecordingMode() const {
232  return parse_info() && parse_info()->is_native()
233             ? SourcePositionTableBuilder::OMIT_SOURCE_POSITIONS
234             : SourcePositionTableBuilder::RECORD_SOURCE_POSITIONS;
235}
236
237bool CompilationInfo::ExpectsJSReceiverAsReceiver() {
238  return is_sloppy(parse_info()->language_mode()) && !parse_info()->is_native();
239}
240
241// ----------------------------------------------------------------------------
242// Implementation of CompilationJob
243
244CompilationJob::Status CompilationJob::PrepareJob() {
245  DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id()));
246  DisallowJavascriptExecution no_js(isolate());
247
248  if (FLAG_trace_opt && info()->IsOptimizing()) {
249    OFStream os(stdout);
250    os << "[compiling method " << Brief(*info()->closure()) << " using "
251       << compiler_name_;
252    if (info()->is_osr()) os << " OSR";
253    os << "]" << std::endl;
254  }
255
256  // Delegate to the underlying implementation.
257  DCHECK(state() == State::kReadyToPrepare);
258  ScopedTimer t(&time_taken_to_prepare_);
259  return UpdateState(PrepareJobImpl(), State::kReadyToExecute);
260}
261
262CompilationJob::Status CompilationJob::ExecuteJob() {
263  DisallowHeapAllocation no_allocation;
264  DisallowHandleAllocation no_handles;
265  DisallowHandleDereference no_deref;
266  DisallowCodeDependencyChange no_dependency_change;
267
268  // Delegate to the underlying implementation.
269  DCHECK(state() == State::kReadyToExecute);
270  ScopedTimer t(&time_taken_to_execute_);
271  return UpdateState(ExecuteJobImpl(), State::kReadyToFinalize);
272}
273
274CompilationJob::Status CompilationJob::FinalizeJob() {
275  DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id()));
276  DisallowCodeDependencyChange no_dependency_change;
277  DisallowJavascriptExecution no_js(isolate());
278  DCHECK(!info()->dependencies()->HasAborted());
279
280  // Delegate to the underlying implementation.
281  DCHECK(state() == State::kReadyToFinalize);
282  ScopedTimer t(&time_taken_to_finalize_);
283  return UpdateState(FinalizeJobImpl(), State::kSucceeded);
284}
285
286namespace {
287
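// Registers a weak dependency from {object} to {code} so that {code} can be
// deoptimized once {object} dies; new-space objects are tracked in a separate
// table maintained by the heap.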
288void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
289                                   Handle<Code> code) {
290  Handle<WeakCell> cell = Code::WeakCellFor(code);
291  Heap* heap = isolate->heap();
292  if (heap->InNewSpace(*object)) {
293    heap->AddWeakNewSpaceObjectToCodeDependency(object, cell);
294  } else {
295    Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
296    dep =
297        DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
298    heap->AddWeakObjectToCodeDependency(object, dep);
299  }
300}
301
302}  // namespace
303
304void CompilationJob::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) {
305  // TODO(turbofan): Move this to pipeline.cc once Crankshaft dies.
306  Isolate* const isolate = code->GetIsolate();
307  DCHECK(code->is_optimized_code());
308  std::vector<Handle<Map>> maps;
309  std::vector<Handle<HeapObject>> objects;
310  {
311    DisallowHeapAllocation no_gc;
312    int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
313                          RelocInfo::ModeMask(RelocInfo::CELL);
314    for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
315      RelocInfo::Mode mode = it.rinfo()->rmode();
316      if (mode == RelocInfo::CELL &&
317          code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
318        objects.push_back(handle(it.rinfo()->target_cell(), isolate));
319      } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
320                 code->IsWeakObjectInOptimizedCode(
321                     it.rinfo()->target_object())) {
322        Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
323                                  isolate);
324        if (object->IsMap()) {
325          maps.push_back(Handle<Map>::cast(object));
326        } else {
327          objects.push_back(object);
328        }
329      }
330    }
331  }
332  for (Handle<Map> map : maps) {
333    if (map->dependent_code()->IsEmpty(DependentCode::kWeakCodeGroup)) {
334      isolate->heap()->AddRetainedMap(map);
335    }
336    Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code);
337  }
338  for (Handle<HeapObject> object : objects) {
339    AddWeakObjectToCodeDependency(isolate, object, code);
340  }
341  code->set_can_have_weak_objects(true);
342}
343
344void CompilationJob::RecordOptimizationStats() {
345  DCHECK(info()->IsOptimizing());
346  Handle<JSFunction> function = info()->closure();
347  if (!function->IsOptimized()) {
348    // Concurrent recompilation and OSR may race.  Increment only once.
349    int opt_count = function->shared()->opt_count();
350    function->shared()->set_opt_count(opt_count + 1);
351  }
352  double ms_creategraph = time_taken_to_prepare_.InMillisecondsF();
353  double ms_optimize = time_taken_to_execute_.InMillisecondsF();
354  double ms_codegen = time_taken_to_finalize_.InMillisecondsF();
355  if (FLAG_trace_opt) {
356    PrintF("[optimizing ");
357    function->ShortPrint();
358    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
359           ms_codegen);
360  }
361  if (FLAG_trace_opt_stats) {
362    static double compilation_time = 0.0;
363    static int compiled_functions = 0;
364    static int code_size = 0;
365
366    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
367    compiled_functions++;
368    code_size += function->shared()->SourceSize();
369    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
370           compiled_functions, code_size, compilation_time);
371  }
372  if (FLAG_hydrogen_stats) {
373    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_prepare_,
374                                                    time_taken_to_execute_,
375                                                    time_taken_to_finalize_);
376  }
377}
378
379// ----------------------------------------------------------------------------
380// Local helper methods that make up the compilation pipeline.
381
382namespace {
383
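// Returns true if {shared} is the top-level function of a script that was
// produced by a call to eval.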
384bool IsEvalToplevel(Handle<SharedFunctionInfo> shared) {
385  return shared->is_toplevel() && shared->script()->IsScript() &&
386         Script::cast(shared->script())->compilation_type() ==
387             Script::COMPILATION_TYPE_EVAL;
388}
389
390void RecordFunctionCompilation(CodeEventListener::LogEventsAndTags tag,
391                               CompilationInfo* info) {
392  // Log the code generation. If source information is available, include
393  // script name and line number. Check explicitly whether logging is
394  // enabled as finding the line number is not free.
395  if (info->isolate()->logger()->is_logging_code_events() ||
396      info->isolate()->is_profiling()) {
397    Handle<SharedFunctionInfo> shared = info->shared_info();
398    Handle<Script> script = info->parse_info()->script();
399    Handle<AbstractCode> abstract_code =
400        info->has_bytecode_array()
401            ? Handle<AbstractCode>::cast(info->bytecode_array())
402            : Handle<AbstractCode>::cast(info->code());
403    if (abstract_code.is_identical_to(
404            info->isolate()->builtins()->CompileLazy())) {
405      return;
406    }
407    int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
408    int column_num =
409        Script::GetColumnNumber(script, shared->start_position()) + 1;
410    String* script_name = script->name()->IsString()
411                              ? String::cast(script->name())
412                              : info->isolate()->heap()->empty_string();
413    CodeEventListener::LogEventsAndTags log_tag =
414        Logger::ToNativeByScript(tag, *script);
415    PROFILE(info->isolate(),
416            CodeCreateEvent(log_tag, *abstract_code, *shared, script_name,
417                            line_num, column_num));
418  }
419}
420
421void EnsureFeedbackMetadata(CompilationInfo* info) {
422  DCHECK(info->has_shared_info());
423
424  // If no type feedback metadata exists, we create it now. At this point the
425  // AstNumbering pass has already run. Note the snapshot can contain outdated
426  // vectors for a different configuration, hence we also create a new vector
427  // when the function is not compiled (i.e. no code was serialized).
428
429  // TODO(mvstanton): reintroduce is_empty() predicate to feedback_metadata().
430  if (info->shared_info()->feedback_metadata()->length() == 0 ||
431      !info->shared_info()->is_compiled()) {
432    Handle<TypeFeedbackMetadata> feedback_metadata = TypeFeedbackMetadata::New(
433        info->isolate(), info->literal()->feedback_vector_spec());
434    info->shared_info()->set_feedback_metadata(*feedback_metadata);
435  }
436
437  // It's very important that recompiles do not alter the structure of the type
438  // feedback vector. Verify that the structure fits the function literal.
439  CHECK(!info->shared_info()->feedback_metadata()->SpecDiffersFrom(
440      info->literal()->feedback_vector_spec()));
441}
442
443bool ShouldUseIgnition(CompilationInfo* info) {
444  if (!FLAG_ignition) return false;
445
446  DCHECK(info->has_shared_info());
447
448  // When requesting debug code as a replacement for existing code, we provide
449  // the same kind as the existing code (to prevent implicit tier-change).
450  if (info->is_debug() && info->shared_info()->is_compiled()) {
451    return !info->shared_info()->HasBaselineCode();
452  }
453
454  // Since we can't OSR from Ignition, skip Ignition for asm.js functions.
455  if (info->shared_info()->asm_function()) {
456    return false;
457  }
458
459  // Top-level functions are only passed through an empty or wildcard filter.
460  if (info->shared_info()->is_toplevel()) {
461    Vector<const char> filter = CStrVector(FLAG_ignition_filter);
462    return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*');
463  }
464
465  // Finally respect the filter.
466  return info->shared_info()->PassesFilter(FLAG_ignition_filter);
467}
468
469int CodeAndMetadataSize(CompilationInfo* info) {
470  if (info->has_bytecode_array()) {
471    return info->bytecode_array()->SizeIncludingMetadata();
472  }
473  return info->code()->SizeIncludingMetadata();
474}
475
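// Generates unoptimized code for {info}: asm.js modules are translated to
// WebAssembly when --validate-asm is enabled; otherwise either Ignition
// bytecode or full-codegen code is produced (see ShouldUseIgnition above).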
476bool GenerateUnoptimizedCode(CompilationInfo* info) {
477  bool success;
478  EnsureFeedbackMetadata(info);
479  if (FLAG_validate_asm && info->scope()->asm_module() &&
480      !info->shared_info()->is_asm_wasm_broken()) {
481    MaybeHandle<FixedArray> wasm_data;
482    wasm_data = AsmJs::ConvertAsmToWasm(info->parse_info());
483    if (!wasm_data.is_null()) {
484      info->shared_info()->set_asm_wasm_data(*wasm_data.ToHandleChecked());
485      info->SetCode(info->isolate()->builtins()->InstantiateAsmJs());
486      return true;
487    }
488  }
489  if (ShouldUseIgnition(info)) {
490    success = interpreter::Interpreter::MakeBytecode(info);
491  } else {
492    success = FullCodeGenerator::MakeCode(info);
493  }
494  if (success) {
495    Isolate* isolate = info->isolate();
496    Counters* counters = isolate->counters();
497    // TODO(4280): Rename counters from "baseline" to "unoptimized" eventually.
498    counters->total_baseline_code_size()->Increment(CodeAndMetadataSize(info));
499    counters->total_baseline_compile_count()->Increment(1);
500  }
501  return success;
502}
503
504bool CompileUnoptimizedCode(CompilationInfo* info) {
505  DCHECK(AllowCompilation::IsAllowed(info->isolate()));
506  if (!Compiler::Analyze(info->parse_info()) ||
507      !GenerateUnoptimizedCode(info)) {
508    Isolate* isolate = info->isolate();
509    if (!isolate->has_pending_exception()) isolate->StackOverflow();
510    return false;
511  }
512  return true;
513}
514
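// Allocates a ScopeInfo for the function's scope and installs it on the
// SharedFunctionInfo.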
515void InstallSharedScopeInfo(CompilationInfo* info,
516                            Handle<SharedFunctionInfo> shared) {
517  Handle<ScopeInfo> scope_info = info->scope()->GetScopeInfo(info->isolate());
518  shared->set_scope_info(*scope_info);
519}
520
521void InstallSharedCompilationResult(CompilationInfo* info,
522                                    Handle<SharedFunctionInfo> shared) {
523  // TODO(mstarzinger): Compiling for debug code might be used to reveal inner
524  // functions via {FindSharedFunctionInfoInScript}, in which case we end up
525  // regenerating existing bytecode. Fix this!
526  if (info->is_debug() && info->has_bytecode_array()) {
527    shared->ClearBytecodeArray();
528  }
529  DCHECK(!info->code().is_null());
530  shared->ReplaceCode(*info->code());
531  if (info->has_bytecode_array()) {
532    DCHECK(!shared->HasBytecodeArray());  // Only compiled once.
533    shared->set_bytecode_array(*info->bytecode_array());
534  }
535}
536
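// Parses and compiles {info} to unoptimized code (bytecode or full-codegen
// code) and installs the result on the SharedFunctionInfo.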
537MUST_USE_RESULT MaybeHandle<Code> GetUnoptimizedCode(CompilationInfo* info) {
538  VMState<COMPILER> state(info->isolate());
539  PostponeInterruptsScope postpone(info->isolate());
540
541  // Create a canonical handle scope before internalizing parsed values if
542  // compiling bytecode. This is required for off-thread bytecode generation.
543  std::unique_ptr<CanonicalHandleScope> canonical;
544  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info->isolate()));
545
546  // Parse and update CompilationInfo with the results.
547  if (!Parser::ParseStatic(info->parse_info())) return MaybeHandle<Code>();
548  Handle<SharedFunctionInfo> shared = info->shared_info();
549  DCHECK_EQ(shared->language_mode(), info->literal()->language_mode());
550
551  // Compile either unoptimized code or bytecode for the interpreter.
552  if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();
553
554  // Update the shared function info with the scope info.
555  InstallSharedScopeInfo(info, shared);
556
557  // Install the compilation result on the shared function info.
558  InstallSharedCompilationResult(info, shared);
559
560  // Record the function compilation event.
561  RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
562
563  return info->code();
564}
565
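// Looks up previously compiled optimized code for {function} in its optimized
// code map, keyed by native context and OSR entry, installing cached literals
// as a side effect.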
566MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
567    Handle<JSFunction> function, BailoutId osr_ast_id) {
568  Handle<SharedFunctionInfo> shared(function->shared());
569  DisallowHeapAllocation no_gc;
570  CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
571      function->context()->native_context(), osr_ast_id);
572  if (cached.code != nullptr) {
573    // Caching of optimized code enabled and optimized code found.
574    if (cached.literals != nullptr) function->set_literals(cached.literals);
575    DCHECK(!cached.code->marked_for_deoptimization());
576    DCHECK(function->shared()->is_compiled());
577    return Handle<Code>(cached.code);
578  }
579  return MaybeHandle<Code>();
580}
581
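// Caches freshly compiled optimized code in the function's optimized code
// map, unless context specialization or bytecode-based OSR makes caching
// unsound.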
582void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
583  Handle<Code> code = info->code();
584  if (code->kind() != Code::OPTIMIZED_FUNCTION) return;  // Nothing to do.
585
586  // Function context specialization folds-in the function context,
587  // so no sharing can occur.
588  if (info->is_function_context_specializing()) return;
589  // Frame specialization implies function context specialization.
590  DCHECK(!info->is_frame_specializing());
591
592  // TODO(4764): When compiling for OSR from bytecode, BailoutId might derive
593  // from bytecode offset and overlap with actual BailoutId. No caching!
594  if (info->is_osr() && info->is_optimizing_from_bytecode()) return;
595
596  // Cache optimized context-specific code.
597  Handle<JSFunction> function = info->closure();
598  Handle<SharedFunctionInfo> shared(function->shared());
599  Handle<LiteralsArray> literals(function->literals());
600  Handle<Context> native_context(function->context()->native_context());
601  SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
602                                            literals, info->osr_ast_id());
603
604  // Do not cache (native) context-independent code compiled for OSR.
605  if (code->is_turbofanned() && info->is_osr()) return;
606
607  // Cache optimized (native) context-independent code.
608  if (FLAG_turbo_cache_shared_code && code->is_turbofanned() &&
609      !info->is_native_context_specializing()) {
610    DCHECK(!info->is_function_context_specializing());
611    DCHECK(info->osr_ast_id().IsNone());
612    Handle<SharedFunctionInfo> shared(function->shared());
613    SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap(shared, code);
614  }
615}
616
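// Runs the AstNumbering pass and mirrors relevant results (AST node count,
// optimization hints) onto the SharedFunctionInfo, if one exists yet.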
617bool Renumber(ParseInfo* parse_info) {
618  if (!AstNumbering::Renumber(parse_info->isolate(), parse_info->zone(),
619                              parse_info->literal())) {
620    return false;
621  }
622  Handle<SharedFunctionInfo> shared_info = parse_info->shared_info();
623  if (!shared_info.is_null()) {
624    FunctionLiteral* lit = parse_info->literal();
625    shared_info->set_ast_node_count(lit->ast_node_count());
626    if (lit->dont_optimize_reason() != kNoReason) {
627      shared_info->DisableOptimization(lit->dont_optimize_reason());
628    }
629    if (lit->flags() & AstProperties::kDontCrankshaft) {
630      shared_info->set_dont_crankshaft(true);
631    }
632  }
633  return true;
634}
635
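// Decides whether {shared} should be optimized by TurboFan rather than
// Crankshaft; the individual conditions are documented inline below.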
636bool UseTurboFan(Handle<SharedFunctionInfo> shared) {
637  bool optimization_disabled = shared->optimization_disabled();
638  bool dont_crankshaft = shared->dont_crankshaft();
639
640  // Check the enabling conditions for Turbofan.
641  // 1. "use asm" code.
642  bool is_turbofanable_asm =
643      FLAG_turbo_asm && shared->asm_function() && !optimization_disabled;
644
645  // 2. Fallback for features unsupported by Crankshaft.
646  bool is_unsupported_by_crankshaft_but_turbofanable =
647      dont_crankshaft && strcmp(FLAG_turbo_filter, "~~") == 0 &&
648      !optimization_disabled;
649
650  // 3. Explicitly enabled by the command-line filter.
651  bool passes_turbo_filter = shared->PassesFilter(FLAG_turbo_filter);
652
653  return is_turbofanable_asm || is_unsupported_by_crankshaft_but_turbofanable ||
654         passes_turbo_filter;
655}
656
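// Runs all three phases of the given compilation job synchronously on the
// main thread and, on success, records stats and caches the optimized code.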
657bool GetOptimizedCodeNow(CompilationJob* job) {
658  CompilationInfo* info = job->info();
659  Isolate* isolate = info->isolate();
660
661  // Parsing is not required when optimizing from existing bytecode.
662  if (!info->is_optimizing_from_bytecode()) {
663    if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;
664    EnsureFeedbackMetadata(info);
665  }
666
667  JSFunction::EnsureLiterals(info->closure());
668
669  TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
670  RuntimeCallTimerScope runtimeTimer(isolate,
671                                     &RuntimeCallStats::RecompileSynchronous);
672  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
673      isolate, &tracing::TraceEventStatsTable::RecompileSynchronous);
674
675  if (job->PrepareJob() != CompilationJob::SUCCEEDED ||
676      job->ExecuteJob() != CompilationJob::SUCCEEDED ||
677      job->FinalizeJob() != CompilationJob::SUCCEEDED) {
678    if (FLAG_trace_opt) {
679      PrintF("[aborted optimizing ");
680      info->closure()->ShortPrint();
681      PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
682    }
683    return false;
684  }
685
686  // Success!
687  job->RecordOptimizationStats();
688  DCHECK(!isolate->has_pending_exception());
689  InsertCodeIntoOptimizedCodeMap(info);
690  RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
691  return true;
692}
693
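// Prepares the given compilation job on the main thread and queues it for the
// concurrent recompilation dispatcher; bails out if the queue is full or the
// heap is under high memory pressure.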
694bool GetOptimizedCodeLater(CompilationJob* job) {
695  CompilationInfo* info = job->info();
696  Isolate* isolate = info->isolate();
697
698  if (!isolate->optimizing_compile_dispatcher()->IsQueueAvailable()) {
699    if (FLAG_trace_concurrent_recompilation) {
700      PrintF("  ** Compilation queue full, will retry optimizing ");
701      info->closure()->ShortPrint();
702      PrintF(" later.\n");
703    }
704    return false;
705  }
706
707  if (isolate->heap()->HighMemoryPressure()) {
708    if (FLAG_trace_concurrent_recompilation) {
709      PrintF("  ** High memory pressure, will retry optimizing ");
710      info->closure()->ShortPrint();
711      PrintF(" later.\n");
712    }
713    return false;
714  }
715
716  // All handles below this point will be allocated in a deferred handle scope
717  // that is detached and handed off to the background thread when we return.
718  CompilationHandleScope handle_scope(info);
719
720  // Parsing is not required when optimizing from existing bytecode.
721  if (!info->is_optimizing_from_bytecode()) {
722    if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;
723    EnsureFeedbackMetadata(info);
724  }
725
726  JSFunction::EnsureLiterals(info->closure());
727
728  // Reopen handles in the new CompilationHandleScope.
729  info->ReopenHandlesInNewHandleScope();
730  info->parse_info()->ReopenHandlesInNewHandleScope();
731
732  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
733  RuntimeCallTimerScope runtimeTimer(info->isolate(),
734                                     &RuntimeCallStats::RecompileSynchronous);
735  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
736      isolate, &tracing::TraceEventStatsTable::RecompileSynchronous);
737
738  if (job->PrepareJob() != CompilationJob::SUCCEEDED) return false;
739  isolate->optimizing_compile_dispatcher()->QueueForOptimization(job);
740
741  if (FLAG_trace_concurrent_recompilation) {
742    PrintF("  ** Queued ");
743    info->closure()->ShortPrint();
744    PrintF(" for concurrent optimization.\n");
745  }
746  return true;
747}
748
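// Main entry point for optimizing compilation: consults the optimized code
// map, picks Crankshaft or TurboFan, and compiles either synchronously or via
// the concurrent dispatcher depending on {mode}.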
749MaybeHandle<Code> GetOptimizedCode(Handle<JSFunction> function,
750                                   Compiler::ConcurrencyMode mode,
751                                   BailoutId osr_ast_id = BailoutId::None(),
752                                   JavaScriptFrame* osr_frame = nullptr) {
753  Isolate* isolate = function->GetIsolate();
754  Handle<SharedFunctionInfo> shared(function->shared(), isolate);
755
756  bool ignition_osr = osr_frame && osr_frame->is_interpreted();
757  DCHECK_IMPLIES(ignition_osr, !osr_ast_id.IsNone());
758  DCHECK_IMPLIES(ignition_osr, FLAG_ignition_osr);
759
760  // Flag combination --ignition-osr --no-turbo-from-bytecode is unsupported.
761  if (ignition_osr && !FLAG_turbo_from_bytecode) return MaybeHandle<Code>();
762
763  Handle<Code> cached_code;
764  // TODO(4764): When compiling for OSR from bytecode, BailoutId might derive
765  // from bytecode offset and overlap with actual BailoutId. No lookup!
766  if (!ignition_osr &&
767      GetCodeFromOptimizedCodeMap(function, osr_ast_id)
768          .ToHandle(&cached_code)) {
769    if (FLAG_trace_opt) {
770      PrintF("[found optimized code for ");
771      function->ShortPrint();
772      if (!osr_ast_id.IsNone()) {
773        PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
774      }
775      PrintF("]\n");
776    }
777    return cached_code;
778  }
779
780  // Reset profiler ticks, function is no longer considered hot.
781  if (shared->is_compiled()) {
782    shared->code()->set_profiler_ticks(0);
783  }
784
785  VMState<COMPILER> state(isolate);
786  DCHECK(!isolate->has_pending_exception());
787  PostponeInterruptsScope postpone(isolate);
788  bool use_turbofan = UseTurboFan(shared) || ignition_osr;
789  std::unique_ptr<CompilationJob> job(
790      use_turbofan ? compiler::Pipeline::NewCompilationJob(function)
791                   : new HCompilationJob(function));
792  CompilationInfo* info = job->info();
793  ParseInfo* parse_info = info->parse_info();
794
795  info->SetOptimizingForOsr(osr_ast_id, osr_frame);
796
797  // Do not use Crankshaft/TurboFan if we need to be able to set break points.
798  if (info->shared_info()->HasDebugInfo()) {
799    info->AbortOptimization(kFunctionBeingDebugged);
800    return MaybeHandle<Code>();
801  }
802
803  // Limit the number of times we try to optimize functions.
804  const int kMaxOptCount =
805      FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
806  if (info->shared_info()->opt_count() > kMaxOptCount) {
807    info->AbortOptimization(kOptimizedTooManyTimes);
808    return MaybeHandle<Code>();
809  }
810
811  CanonicalHandleScope canonical(isolate);
812  TimerEventScope<TimerEventOptimizeCode> optimize_code_timer(isolate);
813  RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::OptimizeCode);
814  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
815      isolate, &tracing::TraceEventStatsTable::OptimizeCode);
816
817  // TurboFan can optimize directly from existing bytecode.
818  if (FLAG_turbo_from_bytecode && use_turbofan && ShouldUseIgnition(info)) {
819    if (!Compiler::EnsureBytecode(info)) {
820      if (isolate->has_pending_exception()) isolate->clear_pending_exception();
821      return MaybeHandle<Code>();
822    }
823    info->MarkAsOptimizeFromBytecode();
824  }
825
826  if (IsEvalToplevel(shared)) {
827    parse_info->set_eval();
828    if (function->context()->IsNativeContext()) parse_info->set_global();
829    parse_info->set_toplevel();
830    parse_info->set_allow_lazy_parsing(false);
831    parse_info->set_lazy(false);
832  }
833
834  if (mode == Compiler::CONCURRENT) {
835    if (GetOptimizedCodeLater(job.get())) {
836      job.release();  // The background recompile job owns this now.
837      return isolate->builtins()->InOptimizationQueue();
838    }
839  } else {
840    if (GetOptimizedCodeNow(job.get())) return info->code();
841  }
842
843  if (isolate->has_pending_exception()) isolate->clear_pending_exception();
844  return MaybeHandle<Code>();
845}
846
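// Finds activations of a given SharedFunctionInfo: interpreted frames on any
// thread, optimized frames that may contain OSR code, and optimized functions
// that inline it.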
847class InterpreterActivationsFinder : public ThreadVisitor,
848                                     public OptimizedFunctionVisitor {
849 public:
850  explicit InterpreterActivationsFinder(SharedFunctionInfo* shared)
851      : shared_(shared), has_activations_(false) {}
852
853  void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
854    Address* activation_pc_address = nullptr;
855    JavaScriptFrameIterator it(isolate, top);
856    for (; !it.done(); it.Advance()) {
857      JavaScriptFrame* frame = it.frame();
858      if (FLAG_turbo_from_bytecode && FLAG_ignition_osr &&
859          frame->is_optimized() && frame->function()->shared() == shared_) {
860        // If we are able to optimize functions directly from bytecode, then
861        // there might be optimized OSR code active on the stack that is not
862        // reachable through a function. We count this as an activation.
863        has_activations_ = true;
864      }
865      if (frame->is_interpreted() && frame->function()->shared() == shared_) {
866        has_activations_ = true;
867        activation_pc_address = frame->pc_address();
868      }
869    }
870
871    if (activation_pc_address) {
872      activation_pc_addresses_.push_back(activation_pc_address);
873    }
874  }
875
876  void VisitFunction(JSFunction* function) {
877    if (function->Inlines(shared_)) has_activations_ = true;
878  }
879
880  void EnterContext(Context* context) {}
881  void LeaveContext(Context* context) {}
882
883  bool MarkActivationsForBaselineOnReturn(Isolate* isolate) {
884    if (activation_pc_addresses_.empty()) return false;
885
886    for (Address* activation_pc_address : activation_pc_addresses_) {
887      DCHECK(isolate->inner_pointer_to_code_cache()
888                 ->GetCacheEntry(*activation_pc_address)
889                 ->code->is_interpreter_trampoline_builtin());
890      *activation_pc_address =
891          isolate->builtins()->InterpreterMarkBaselineOnReturn()->entry();
892    }
893    return true;
894  }
895
896  bool has_activations() { return has_activations_; }
897
898 private:
899  SharedFunctionInfo* shared_;
900  bool has_activations_;
901  std::vector<Address*> activation_pc_addresses_;
902};
903
904bool HasInterpreterActivations(
905    Isolate* isolate, InterpreterActivationsFinder* activations_finder) {
906  activations_finder->VisitThread(isolate, isolate->thread_local_top());
907  isolate->thread_manager()->IterateArchivedThreads(activations_finder);
908  if (FLAG_turbo_from_bytecode) {
909    // If we are able to optimize functions directly from bytecode, then there
910    // might be optimized functions that rely on bytecode being around. We need
911    // to prevent switching the given function to baseline code in those cases.
912    Deoptimizer::VisitAllOptimizedFunctions(isolate, activations_finder);
913  }
914  return activations_finder->has_activations();
915}
916
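// Tiers a function up from Ignition bytecode to baseline (full-codegen) code,
// unless debug info, resumable functions or live interpreter activations make
// the switch unsafe.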
917MaybeHandle<Code> GetBaselineCode(Handle<JSFunction> function) {
918  Isolate* isolate = function->GetIsolate();
919  VMState<COMPILER> state(isolate);
920  PostponeInterruptsScope postpone(isolate);
921  Zone zone(isolate->allocator());
922  ParseInfo parse_info(&zone, function);
923  CompilationInfo info(&parse_info, function);
924
925  // Reset profiler ticks, function is no longer considered hot.
926  if (function->shared()->HasBytecodeArray()) {
927    function->shared()->set_profiler_ticks(0);
928  }
929
930  // Nothing left to do if the function already has baseline code.
931  if (function->shared()->code()->kind() == Code::FUNCTION) {
932    return Handle<Code>(function->shared()->code());
933  }
934
935  // We do not switch to baseline code when the debugger might have created a
936  // copy of the bytecode with break slots to be able to set break points.
937  if (function->shared()->HasDebugInfo()) {
938    return MaybeHandle<Code>();
939  }
940
941  // TODO(4280): For now we do not switch generators or async functions to
942  // baseline code because there might be suspended activations stored in
943  // generator objects on the heap. We could eventually go directly to
944  // TurboFan in this case.
945  if (function->shared()->is_resumable()) {
946    return MaybeHandle<Code>();
947  }
948
949  // TODO(4280): For now we disable switching to baseline code in the presence
950  // of interpreter activations of the given function. The reason is that the
951  // underlying bytecode is cleared below. Note that this only applies in case
952  // the --ignition-preserve-bytecode flag is not passed.
953  if (!FLAG_ignition_preserve_bytecode) {
954    InterpreterActivationsFinder activations_finder(function->shared());
955    if (HasInterpreterActivations(isolate, &activations_finder)) {
956      if (FLAG_trace_opt) {
957        OFStream os(stdout);
958        os << "[unable to switch " << Brief(*function) << " due to activations]"
959           << std::endl;
960      }
961
962      if (activations_finder.MarkActivationsForBaselineOnReturn(isolate)) {
963        if (FLAG_trace_opt) {
964          OFStream os(stdout);
965          os << "[marking " << Brief(function->shared())
966             << " for baseline recompilation on return]" << std::endl;
967        }
968      }
969
970      return MaybeHandle<Code>();
971    }
972  }
973
974  if (FLAG_trace_opt) {
975    OFStream os(stdout);
976    os << "[switching method " << Brief(*function) << " to baseline code]"
977       << std::endl;
978  }
979
980  // Parse and update CompilationInfo with the results.
981  if (!Parser::ParseStatic(info.parse_info())) return MaybeHandle<Code>();
982  Handle<SharedFunctionInfo> shared = info.shared_info();
983  DCHECK_EQ(shared->language_mode(), info.literal()->language_mode());
984
985  // Compile baseline code using the full code generator.
986  if (!Compiler::Analyze(info.parse_info()) ||
987      !FullCodeGenerator::MakeCode(&info)) {
988    if (!isolate->has_pending_exception()) isolate->StackOverflow();
989    return MaybeHandle<Code>();
990  }
991
992  // TODO(4280): For now we play it safe and remove the bytecode array when we
993  // switch to baseline code. We might consider keeping around the bytecode so
994  // that it can be used as the "source of truth" eventually. Note that this
995  // only applies in case the --ignition-preserve-bytecode flag is not passed.
996  if (!FLAG_ignition_preserve_bytecode) shared->ClearBytecodeArray();
997
998  // Update the shared function info with the scope info.
999  InstallSharedScopeInfo(&info, shared);
1000
1001  // Install the compilation result on the shared function info.
1002  InstallSharedCompilationResult(&info, shared);
1003
1004  // Record the function compilation event.
1005  RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, &info);
1006
1007  return info.code();
1008}
1009
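// Lazy compilation entry point: reuses cached optimized code or existing
// unoptimized code when possible, otherwise compiles fresh unoptimized code
// (and optimizes immediately under --always-opt).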
1010MaybeHandle<Code> GetLazyCode(Handle<JSFunction> function) {
1011  Isolate* isolate = function->GetIsolate();
1012  DCHECK(!isolate->has_pending_exception());
1013  DCHECK(!function->is_compiled());
1014  TimerEventScope<TimerEventCompileCode> compile_timer(isolate);
1015  RuntimeCallTimerScope runtimeTimer(isolate,
1016                                     &RuntimeCallStats::CompileCodeLazy);
1017  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
1018      isolate, &tracing::TraceEventStatsTable::CompileCodeLazy);
1019  AggregatedHistogramTimerScope timer(isolate->counters()->compile_lazy());
1020
1021  if (FLAG_turbo_cache_shared_code) {
1022    Handle<Code> cached_code;
1023    if (GetCodeFromOptimizedCodeMap(function, BailoutId::None())
1024            .ToHandle(&cached_code)) {
1025      if (FLAG_trace_opt) {
1026        PrintF("[found optimized code for ");
1027        function->ShortPrint();
1028        PrintF(" during unoptimized compile]\n");
1029      }
1030      DCHECK(function->shared()->is_compiled());
1031      return cached_code;
1032    }
1033  }
1034
1035  if (function->shared()->is_compiled()) {
1036    return Handle<Code>(function->shared()->code());
1037  }
1038
1039  if (function->shared()->HasBytecodeArray()) {
1040    Handle<Code> entry = isolate->builtins()->InterpreterEntryTrampoline();
1041    function->shared()->ReplaceCode(*entry);
1042    return entry;
1043  }
1044
1045  Zone zone(isolate->allocator());
1046  ParseInfo parse_info(&zone, function);
1047  CompilationInfo info(&parse_info, function);
1048  Handle<Code> result;
1049  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, GetUnoptimizedCode(&info), Code);
1050
1051  if (FLAG_always_opt) {
1052    Handle<Code> opt_code;
1053    if (GetOptimizedCode(function, Compiler::NOT_CONCURRENT)
1054            .ToHandle(&opt_code)) {
1055      result = opt_code;
1056    }
1057  }
1058
1059  return result;
1060}
1061
1062
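// Allocates a SharedFunctionInfo for {literal} that starts out lazily
// compiled, i.e. with the CompileLazy builtin as its code.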
1063Handle<SharedFunctionInfo> NewSharedFunctionInfoForLiteral(
1064    Isolate* isolate, FunctionLiteral* literal, Handle<Script> script) {
1065  Handle<Code> code = isolate->builtins()->CompileLazy();
1066  Handle<ScopeInfo> scope_info = handle(ScopeInfo::Empty(isolate));
1067  Handle<SharedFunctionInfo> result = isolate->factory()->NewSharedFunctionInfo(
1068      literal->name(), literal->materialized_literal_count(), literal->kind(),
1069      code, scope_info);
1070  SharedFunctionInfo::InitFromFunctionLiteral(result, literal);
1071  SharedFunctionInfo::SetScript(result, script);
1072  return result;
1073}
1074
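// Compiles the top-level code of a script, module or eval and returns the
// resulting SharedFunctionInfo.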
1075Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
1076  Isolate* isolate = info->isolate();
1077  TimerEventScope<TimerEventCompileCode> timer(isolate);
1078  RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::CompileCode);
1079  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
1080      isolate, &tracing::TraceEventStatsTable::CompileCode);
1081  PostponeInterruptsScope postpone(isolate);
1082  DCHECK(!isolate->native_context().is_null());
1083  ParseInfo* parse_info = info->parse_info();
1084  Handle<Script> script = parse_info->script();
1085
1086  // Create a canonical handle scope before internalizing parsed values if
1087  // compiling bytecode. This is required for off-thread bytecode generation.
1088  std::unique_ptr<CanonicalHandleScope> canonical;
1089  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(isolate));
1090
1091  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
1092  FixedArray* array = isolate->native_context()->embedder_data();
1093  script->set_context_data(array->get(v8::Context::kDebugIdIndex));
1094
1095  isolate->debug()->OnBeforeCompile(script);
1096
1097  DCHECK(parse_info->is_eval() || parse_info->is_global() ||
1098         parse_info->is_module());
1099
1100  parse_info->set_toplevel();
1101
1102  Handle<SharedFunctionInfo> result;
1103
1104  { VMState<COMPILER> state(info->isolate());
1105    if (parse_info->literal() == NULL) {
1106      // Parse the script if needed (if it's already parsed, literal() is
1107      // non-NULL). If compiling for debugging, we may eagerly compile inner
1108      // functions, so do not parse lazily in that case.
1109      ScriptCompiler::CompileOptions options = parse_info->compile_options();
1110      bool parse_allow_lazy = (options == ScriptCompiler::kConsumeParserCache ||
1111                               String::cast(script->source())->length() >
1112                                   FLAG_min_preparse_length) &&
1113                              !info->is_debug();
1114
1115      // Consider parsing eagerly when targeting the code cache.
1116      parse_allow_lazy &= !(FLAG_serialize_eager && info->will_serialize());
1117
1118      // Consider parsing eagerly when targeting Ignition.
1119      parse_allow_lazy &= !(FLAG_ignition && FLAG_ignition_eager &&
1120                            !isolate->serializer_enabled());
1121
1122      parse_info->set_allow_lazy_parsing(parse_allow_lazy);
1123      if (!parse_allow_lazy &&
1124          (options == ScriptCompiler::kProduceParserCache ||
1125           options == ScriptCompiler::kConsumeParserCache)) {
1126        // We are going to parse eagerly, but we either 1) have cached data
1127        // produced by lazy parsing or 2) are asked to generate cached data.
1128        // Eager parsing cannot benefit from cached data, and producing cached
1129        // data while parsing eagerly is not implemented.
1130        parse_info->set_cached_data(nullptr);
1131        parse_info->set_compile_options(ScriptCompiler::kNoCompileOptions);
1132      }
1133
1134      if (!Parser::ParseStatic(parse_info)) {
1135        return Handle<SharedFunctionInfo>::null();
1136      }
1137    }
1138
1139    DCHECK(!info->is_debug() || !parse_info->allow_lazy_parsing());
1140
1141    FunctionLiteral* lit = parse_info->literal();
1142
1143    // Measure how long it takes to do the compilation; only take the
1144    // rest of the function into account to avoid overlap with the
1145    // parsing statistics.
1146    RuntimeCallTimerScope runtimeTimer(
1147        isolate, parse_info->is_eval() ? &RuntimeCallStats::CompileEval
1148                                       : &RuntimeCallStats::Compile);
1149    HistogramTimer* rate = parse_info->is_eval()
1150                               ? info->isolate()->counters()->compile_eval()
1151                               : info->isolate()->counters()->compile();
1152    HistogramTimerScope timer(rate);
1153    TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
1154        isolate,
1155        (parse_info->is_eval() ? &tracing::TraceEventStatsTable::CompileEval
1156                               : &tracing::TraceEventStatsTable::Compile));
1157
1158    // Allocate a shared function info object.
1159    DCHECK_EQ(kNoSourcePosition, lit->function_token_position());
1160    result = NewSharedFunctionInfoForLiteral(isolate, lit, script);
1161    result->set_is_toplevel(true);
1162    if (parse_info->is_eval()) {
1163      // Eval scripts cannot be (re-)compiled without context.
1164      result->set_allows_lazy_compilation_without_context(false);
1165    }
1166    parse_info->set_shared_info(result);
1167
1168    // Compile the code.
1169    if (!CompileUnoptimizedCode(info)) {
1170      return Handle<SharedFunctionInfo>::null();
1171    }
1172
1173    // Update the shared function info with the scope info.
1174    InstallSharedScopeInfo(info, result);
1175
1176    // Install the compilation result on the shared function info.
1177    InstallSharedCompilationResult(info, result);
1178
1179    Handle<String> script_name =
1180        script->name()->IsString()
1181            ? Handle<String>(String::cast(script->name()))
1182            : isolate->factory()->empty_string();
1183    CodeEventListener::LogEventsAndTags log_tag =
1184        parse_info->is_eval()
1185            ? CodeEventListener::EVAL_TAG
1186            : Logger::ToNativeByScript(CodeEventListener::SCRIPT_TAG, *script);
1187
1188    PROFILE(isolate, CodeCreateEvent(log_tag, result->abstract_code(), *result,
1189                                     *script_name));
1190
1191    if (!script.is_null())
1192      script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
1193  }
1194
1195  return result;
1196}
1197
1198}  // namespace
1199
1200// ----------------------------------------------------------------------------
1201// Implementation of Compiler
1202
1203bool Compiler::Analyze(ParseInfo* info) {
1204  DCHECK_NOT_NULL(info->literal());
1205  if (!Rewriter::Rewrite(info)) return false;
1206  Scope::Analyze(info);
1207  if (!Renumber(info)) return false;
1208  DCHECK_NOT_NULL(info->scope());
1209  return true;
1210}
1211
1212bool Compiler::ParseAndAnalyze(ParseInfo* info) {
1213  if (!Parser::ParseStatic(info)) return false;
1214  if (!Compiler::Analyze(info)) return false;
1215  DCHECK_NOT_NULL(info->literal());
1216  DCHECK_NOT_NULL(info->scope());
1217  return true;
1218}
1219
1220bool Compiler::Compile(Handle<JSFunction> function, ClearExceptionFlag flag) {
1221  if (function->is_compiled()) return true;
1222  Isolate* isolate = function->GetIsolate();
1223  DCHECK(AllowCompilation::IsAllowed(isolate));
1224
1225  // Start a compilation.
1226  Handle<Code> code;
1227  if (!GetLazyCode(function).ToHandle(&code)) {
1228    if (flag == CLEAR_EXCEPTION) {
1229      isolate->clear_pending_exception();
1230    }
1231    return false;
1232  }
1233
1234  // Install code on closure.
1235  function->ReplaceCode(*code);
1236  JSFunction::EnsureLiterals(function);
1237
1238  // Check postconditions on success.
1239  DCHECK(!isolate->has_pending_exception());
1240  DCHECK(function->shared()->is_compiled());
1241  DCHECK(function->is_compiled());
1242  return true;
1243}
1244
1245bool Compiler::CompileBaseline(Handle<JSFunction> function) {
1246  Isolate* isolate = function->GetIsolate();
1247  DCHECK(AllowCompilation::IsAllowed(isolate));
1248
1249  // Start a compilation.
1250  Handle<Code> code;
1251  if (!GetBaselineCode(function).ToHandle(&code)) {
1252    // Baseline generation failed, get unoptimized code.
1253    DCHECK(function->shared()->is_compiled());
1254    code = handle(function->shared()->code());
1255    isolate->clear_pending_exception();
1256  }
1257
1258  // Install code on closure.
1259  function->ReplaceCode(*code);
1260  JSFunction::EnsureLiterals(function);
1261
1262  // Check postconditions on success.
1263  DCHECK(!isolate->has_pending_exception());
1264  DCHECK(function->shared()->is_compiled());
1265  DCHECK(function->is_compiled());
1266  return true;
1267}
1268
1269bool Compiler::CompileOptimized(Handle<JSFunction> function,
1270                                ConcurrencyMode mode) {
1271  if (function->IsOptimized()) return true;
1272  Isolate* isolate = function->GetIsolate();
1273  DCHECK(AllowCompilation::IsAllowed(isolate));
1274
1275  // Start a compilation.
1276  Handle<Code> code;
1277  if (!GetOptimizedCode(function, mode).ToHandle(&code)) {
1278    // Optimization failed, get unoptimized code.
1279    DCHECK(!isolate->has_pending_exception());
1280    if (function->shared()->is_compiled()) {
1281      code = handle(function->shared()->code(), isolate);
1282    } else if (function->shared()->HasBytecodeArray()) {
1283      code = isolate->builtins()->InterpreterEntryTrampoline();
1284      function->shared()->ReplaceCode(*code);
1285    } else {
1286      Zone zone(isolate->allocator());
1287      ParseInfo parse_info(&zone, function);
1288      CompilationInfo info(&parse_info, function);
1289      if (!GetUnoptimizedCode(&info).ToHandle(&code)) {
1290        return false;
1291      }
1292    }
1293  }
1294
1295  // Install code on closure.
1296  function->ReplaceCode(*code);
1297  JSFunction::EnsureLiterals(function);
1298
1299  // Check postconditions on success.
1300  DCHECK(!isolate->has_pending_exception());
1301  DCHECK(function->shared()->is_compiled());
1302  DCHECK(function->is_compiled());
1303  return true;
1304}
1305
1306bool Compiler::CompileDebugCode(Handle<JSFunction> function) {
1307  Isolate* isolate = function->GetIsolate();
1308  DCHECK(AllowCompilation::IsAllowed(isolate));
1309
1310  // Start a compilation.
1311  Zone zone(isolate->allocator());
1312  ParseInfo parse_info(&zone, function);
1313  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
1314  if (IsEvalToplevel(handle(function->shared()))) {
1315    parse_info.set_eval();
1316    if (function->context()->IsNativeContext()) parse_info.set_global();
1317    parse_info.set_toplevel();
1318    parse_info.set_allow_lazy_parsing(false);
1319    parse_info.set_lazy(false);
1320  }
1321  info.MarkAsDebug();
1322  if (GetUnoptimizedCode(&info).is_null()) {
1323    isolate->clear_pending_exception();
1324    return false;
1325  }
1326
1327  // Check postconditions on success.
1328  DCHECK(!isolate->has_pending_exception());
1329  DCHECK(function->shared()->is_compiled());
1330  DCHECK(function->shared()->HasDebugCode());
1331  return true;
1332}
1333
1334bool Compiler::CompileDebugCode(Handle<SharedFunctionInfo> shared) {
1335  Isolate* isolate = shared->GetIsolate();
1336  DCHECK(AllowCompilation::IsAllowed(isolate));
1337
1338  // Start a compilation.
1339  Zone zone(isolate->allocator());
1340  ParseInfo parse_info(&zone, shared);
1341  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
1342  DCHECK(shared->allows_lazy_compilation_without_context());
1343  DCHECK(!IsEvalToplevel(shared));
1344  info.MarkAsDebug();
1345  if (GetUnoptimizedCode(&info).is_null()) {
1346    isolate->clear_pending_exception();
1347    return false;
1348  }
1349
1350  // Check postconditions on success.
1351  DCHECK(!isolate->has_pending_exception());
1352  DCHECK(shared->is_compiled());
1353  DCHECK(shared->HasDebugCode());
1354  return true;
1355}
1356
1357MaybeHandle<JSArray> Compiler::CompileForLiveEdit(Handle<Script> script) {
1358  Isolate* isolate = script->GetIsolate();
1359  DCHECK(AllowCompilation::IsAllowed(isolate));
1360
1361  // In order to ensure that live edit function info collection finds the newly
1362  // generated shared function infos, clear the script's list temporarily
1363  // and restore it at the end of this method.
1364  Handle<Object> old_function_infos(script->shared_function_infos(), isolate);
1365  script->set_shared_function_infos(Smi::FromInt(0));
1366
1367  // Start a compilation.
1368  Zone zone(isolate->allocator());
1369  ParseInfo parse_info(&zone, script);
1370  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
1371  parse_info.set_global();
1372  info.MarkAsDebug();
1373
1374  // TODO(635): support extensions.
1375  const bool compilation_succeeded = !CompileToplevel(&info).is_null();
1376  Handle<JSArray> infos;
1377  if (compilation_succeeded) {
1378    // Check postconditions on success.
1379    DCHECK(!isolate->has_pending_exception());
1380    infos = LiveEditFunctionTracker::Collect(parse_info.literal(), script,
1381                                             &zone, isolate);
1382  }
1383
1384  // Restore the original function info list in order to remain side-effect
1385  // free as much as possible, since some code expects the old shared function
1386  // infos to stick around.
1387  script->set_shared_function_infos(*old_function_infos);
1388
1389  return infos;
1390}
1391
1392bool Compiler::EnsureBytecode(CompilationInfo* info) {
1393  DCHECK(ShouldUseIgnition(info));
1394  if (!info->shared_info()->HasBytecodeArray()) {
1395    DCHECK(!info->shared_info()->is_compiled());
1396    if (GetUnoptimizedCode(info).is_null()) return false;
1397  }
1398  DCHECK(info->shared_info()->HasBytecodeArray());
1399  return true;
1400}
1401
1402// TODO(turbofan): In the future, unoptimized code with deopt support could
1403// be generated lazily once deopt is triggered.
1404bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) {
1405  DCHECK_NOT_NULL(info->literal());
1406  DCHECK_NOT_NULL(info->scope());
1407  Handle<SharedFunctionInfo> shared = info->shared_info();
1408  if (!shared->has_deoptimization_support()) {
1409    Zone zone(info->isolate()->allocator());
1410    CompilationInfo unoptimized(info->parse_info(), info->closure());
1411    unoptimized.EnableDeoptimizationSupport();
1412
1413    // TODO(4280): For now we do not switch generators or async functions to
1414    // baseline code because there might be suspended activations stored in
1415    // generator objects on the heap. We could eventually go directly to
1416    // TurboFan in this case.
1417    if (shared->is_resumable()) return false;
1418
1419    // TODO(4280): For now we disable switching to baseline code in the presence
1420    // of interpreter activations of the given function. The reason is that the
1421    // underlying bytecode is cleared below. The expensive check for activations
1422    // only needs to be done when the given function has bytecode; otherwise we
1423    // can be sure there are no activations. Note that this only applies if the
1424    // --ignition-preserve-bytecode flag is not passed.
1425    if (!FLAG_ignition_preserve_bytecode && shared->HasBytecodeArray()) {
1426      InterpreterActivationsFinder activations_finder(*shared);
1427      if (HasInterpreterActivations(info->isolate(), &activations_finder)) {
1428        return false;
1429      }
1430    }
1431
1432    // If the current code has reloc info for serialization, also include
1433    // reloc info for serialization for the new code, so that deopt support
1434    // can be added without losing IC state.
1435    if (shared->code()->kind() == Code::FUNCTION &&
1436        shared->code()->has_reloc_info_for_serialization()) {
1437      unoptimized.PrepareForSerializing();
1438    }
1439    EnsureFeedbackMetadata(&unoptimized);
1440    if (!FullCodeGenerator::MakeCode(&unoptimized)) return false;
1441
1442    // TODO(4280): For now we play it safe and remove the bytecode array when we
1443    // switch to baseline code. We might consider keeping around the bytecode so
1444    // that it can be used as the "source of truth" eventually. Note that this
1445    // only applies if the --ignition-preserve-bytecode flag is not passed.
1446    if (!FLAG_ignition_preserve_bytecode && shared->HasBytecodeArray()) {
1447      shared->ClearBytecodeArray();
1448    }
1449
1450    // The scope info might not have been set if a lazily compiled
1451    // function is inlined before being called for the first time.
1452    if (shared->scope_info() == ScopeInfo::Empty(info->isolate())) {
1453      InstallSharedScopeInfo(info, shared);
1454    }
1455
1456    // Install compilation result on the shared function info.
1457    shared->EnableDeoptimizationSupport(*unoptimized.code());
1458
1459    // The existing unoptimized code was replaced with the new one.
1460    RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG,
1461                              &unoptimized);
1462  }
1463  return true;
1464}
1465
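// Determines the next compilation tier for |function|: functions currently
// running on the interpreter trampoline go to BASELINE, unless TurboFan can
// consume their bytecode directly (--turbo-from-bytecode and UseTurboFan), in
// which case they go straight to OPTIMIZED; everything else goes to OPTIMIZED.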
1466// static
1467Compiler::CompilationTier Compiler::NextCompilationTier(JSFunction* function) {
1468  Handle<SharedFunctionInfo> shared(function->shared(), function->GetIsolate());
1469  if (shared->code()->is_interpreter_trampoline_builtin()) {
1470    if (FLAG_turbo_from_bytecode && UseTurboFan(shared)) {
1471      return OPTIMIZED;
1472    } else {
1473      return BASELINE;
1474    }
1475  } else {
1476    return OPTIMIZED;
1477  }
1478}
1479
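// Compiles an eval source string in the given context, consulting and
// populating the per-isolate compilation cache keyed on the source, the outer
// function and the scope position. Returns a fresh JSFunction for the
// (possibly cached) shared function info, or an empty MaybeHandle on failure.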
1480MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
1481    Handle<String> source, Handle<SharedFunctionInfo> outer_info,
1482    Handle<Context> context, LanguageMode language_mode,
1483    ParseRestriction restriction, int eval_scope_position, int eval_position,
1484    int line_offset, int column_offset, Handle<Object> script_name,
1485    ScriptOriginOptions options) {
1486  Isolate* isolate = source->GetIsolate();
1487  int source_length = source->length();
1488  isolate->counters()->total_eval_size()->Increment(source_length);
1489  isolate->counters()->total_compile_size()->Increment(source_length);
1490
1491  CompilationCache* compilation_cache = isolate->compilation_cache();
1492  MaybeHandle<SharedFunctionInfo> maybe_shared_info =
1493      compilation_cache->LookupEval(source, outer_info, context, language_mode,
1494                                    eval_scope_position);
1495  Handle<SharedFunctionInfo> shared_info;
1496
1497  Handle<Script> script;
1498  if (!maybe_shared_info.ToHandle(&shared_info)) {
1499    script = isolate->factory()->NewScript(source);
1500    if (!script_name.is_null()) {
1501      script->set_name(*script_name);
1502      script->set_line_offset(line_offset);
1503      script->set_column_offset(column_offset);
1504    }
1505    script->set_origin_options(options);
1506    script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
1507    Script::SetEvalOrigin(script, outer_info, eval_position);
1508
1509    Zone zone(isolate->allocator());
1510    ParseInfo parse_info(&zone, script);
1511    CompilationInfo info(&parse_info, Handle<JSFunction>::null());
1512    parse_info.set_eval();
1513    if (context->IsNativeContext()) parse_info.set_global();
1514    parse_info.set_language_mode(language_mode);
1515    parse_info.set_parse_restriction(restriction);
1516    parse_info.set_context(context);
1517
1518    shared_info = CompileToplevel(&info);
1519
1520    if (shared_info.is_null()) {
1521      return MaybeHandle<JSFunction>();
1522    } else {
1523      // If caller is strict mode, the result must be in strict mode as well.
1524      DCHECK(is_sloppy(language_mode) ||
1525             is_strict(shared_info->language_mode()));
1526      compilation_cache->PutEval(source, outer_info, context, shared_info,
1527                                 eval_scope_position);
1528    }
1529  }
1530
1531  Handle<JSFunction> result =
1532      isolate->factory()->NewFunctionFromSharedFunctionInfo(
1533          shared_info, context, NOT_TENURED);
1534
1535  // OnAfterCompile has to be called after we create the JSFunction, which we
1536  // may need in order to recompile the eval for debugging if we find a
1537  // function that contains break points in the eval script.
1538  isolate->debug()->OnAfterCompile(script);
1539
1540  return result;
1541}
1542
1543namespace {
1544
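// Called when the native context has code generation from strings disabled;
// consults the embedder-provided callback, if any, to decide whether to allow
// it anyway.
//
// Illustrative sketch only (not part of this file): an embedder would install
// such a callback through the public API, here assumed to be
// v8::Isolate::SetAllowCodeGenerationFromStringsCallback, with |v8_isolate|
// standing in for the embedder's v8::Isolate:
//
//   bool AllowCodeGen(v8::Local<v8::Context> context) { return false; }
//   ...
//   v8_isolate->SetAllowCodeGenerationFromStringsCallback(AllowCodeGen);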
1545bool CodeGenerationFromStringsAllowed(Isolate* isolate,
1546                                      Handle<Context> context) {
1547  DCHECK(context->allow_code_gen_from_strings()->IsFalse(isolate));
1548  // Check with callback if set.
1549  AllowCodeGenerationFromStringsCallback callback =
1550      isolate->allow_code_gen_callback();
1551  if (callback == NULL) {
1552    // No callback set and code generation disallowed.
1553    return false;
1554  } else {
1555    // Callback set. Let it decide if code generation is allowed.
1556    VMState<EXTERNAL> state(isolate);
1557    return callback(v8::Utils::ToLocal(context));
1558  }
1559}
1560
1561}  // namespace
1562
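// Compiles a source string used for code generation from strings
// (string-to-code), after verifying that the native context permits it. The
// result is compiled as sloppy-mode eval code in the native context.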
1563MaybeHandle<JSFunction> Compiler::GetFunctionFromString(
1564    Handle<Context> context, Handle<String> source,
1565    ParseRestriction restriction) {
1566  Isolate* const isolate = context->GetIsolate();
1567  Handle<Context> native_context(context->native_context(), isolate);
1568
1569  // Check if the native context allows code generation from strings. Throw
1570  // an exception if it doesn't.
1571  if (native_context->allow_code_gen_from_strings()->IsFalse(isolate) &&
1572      !CodeGenerationFromStringsAllowed(isolate, native_context)) {
1573    Handle<Object> error_message =
1574        native_context->ErrorMessageForCodeGenerationFromStrings();
1575    THROW_NEW_ERROR(isolate, NewEvalError(MessageTemplate::kCodeGenFromStrings,
1576                                          error_message),
1577                    JSFunction);
1578  }
1579
1580  // Compile source string in the native context.
1581  int eval_scope_position = 0;
1582  int eval_position = kNoSourcePosition;
1583  Handle<SharedFunctionInfo> outer_info(native_context->closure()->shared());
1584  return Compiler::GetFunctionFromEval(source, outer_info, native_context,
1585                                       SLOPPY, restriction, eval_scope_position,
1586                                       eval_position);
1587}
1588
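// Main entry point for compiling a top-level script. Consults the per-isolate
// compilation cache (and, when enabled, embedder-provided cached code) before
// compiling, and can produce parser or code cache data as requested via
// |compile_options|.
//
// Minimal usage sketch (illustrative only, not called from this file): the
// caller sets up |source| and |context|; the null handles for the optional
// script name and source map URL, the default-constructed origin options and
// the NOT_NATIVES_CODE/non-module flags are assumptions made for this example:
//
//   Handle<SharedFunctionInfo> shared =
//       Compiler::GetSharedFunctionInfoForScript(
//           source, Handle<Object>(), 0, 0, ScriptOriginOptions(),
//           Handle<Object>(), context, nullptr, nullptr,
//           ScriptCompiler::kNoCompileOptions, NOT_NATIVES_CODE, false);
//   if (shared.is_null()) {
//     // Compilation failed; pending messages have already been reported.
//   }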
1589Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForScript(
1590    Handle<String> source, Handle<Object> script_name, int line_offset,
1591    int column_offset, ScriptOriginOptions resource_options,
1592    Handle<Object> source_map_url, Handle<Context> context,
1593    v8::Extension* extension, ScriptData** cached_data,
1594    ScriptCompiler::CompileOptions compile_options, NativesFlag natives,
1595    bool is_module) {
1596  Isolate* isolate = source->GetIsolate();
1597  if (compile_options == ScriptCompiler::kNoCompileOptions) {
1598    cached_data = NULL;
1599  } else if (compile_options == ScriptCompiler::kProduceParserCache ||
1600             compile_options == ScriptCompiler::kProduceCodeCache) {
1601    DCHECK(cached_data && !*cached_data);
1602    DCHECK(extension == NULL);
1603    DCHECK(!isolate->debug()->is_loaded());
1604  } else {
1605    DCHECK(compile_options == ScriptCompiler::kConsumeParserCache ||
1606           compile_options == ScriptCompiler::kConsumeCodeCache);
1607    DCHECK(cached_data && *cached_data);
1608    DCHECK(extension == NULL);
1609  }
1610  int source_length = source->length();
1611  isolate->counters()->total_load_size()->Increment(source_length);
1612  isolate->counters()->total_compile_size()->Increment(source_length);
1613
1614  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
1615  CompilationCache* compilation_cache = isolate->compilation_cache();
1616
1617  // Do a lookup in the compilation cache but not for extensions.
1618  MaybeHandle<SharedFunctionInfo> maybe_result;
1619  Handle<SharedFunctionInfo> result;
1620  if (extension == NULL) {
1621    // First check per-isolate compilation cache.
1622    maybe_result = compilation_cache->LookupScript(
1623        source, script_name, line_offset, column_offset, resource_options,
1624        context, language_mode);
1625    if (maybe_result.is_null() && FLAG_serialize_toplevel &&
1626        compile_options == ScriptCompiler::kConsumeCodeCache &&
1627        !isolate->debug()->is_loaded()) {
1628      // Then check cached code provided by embedder.
1629      HistogramTimerScope timer(isolate->counters()->compile_deserialize());
1630      RuntimeCallTimerScope runtimeTimer(isolate,
1631                                         &RuntimeCallStats::CompileDeserialize);
1632      TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
1633          isolate, &tracing::TraceEventStatsTable::CompileDeserialize);
1634      Handle<SharedFunctionInfo> result;
1635      if (CodeSerializer::Deserialize(isolate, *cached_data, source)
1636              .ToHandle(&result)) {
1637        // Promote to per-isolate compilation cache.
1638        compilation_cache->PutScript(source, context, language_mode, result);
1639        return result;
1640      }
1641      // Deserializer failed. Fall through to compile.
1642    }
1643  }
1644
1645  base::ElapsedTimer timer;
1646  if (FLAG_profile_deserialization && FLAG_serialize_toplevel &&
1647      compile_options == ScriptCompiler::kProduceCodeCache) {
1648    timer.Start();
1649  }
1650
1651  if (!maybe_result.ToHandle(&result) ||
1652      (FLAG_serialize_toplevel &&
1653       compile_options == ScriptCompiler::kProduceCodeCache)) {
1654    // No cache entry found, or embedder wants a code cache. Compile the script.
1655
1656    // Create a script object describing the script to be compiled.
1657    Handle<Script> script = isolate->factory()->NewScript(source);
1658    if (natives == NATIVES_CODE) {
1659      script->set_type(Script::TYPE_NATIVE);
1660      script->set_hide_source(true);
1661    } else if (natives == EXTENSION_CODE) {
1662      script->set_type(Script::TYPE_EXTENSION);
1663      script->set_hide_source(true);
1664    }
1665    if (!script_name.is_null()) {
1666      script->set_name(*script_name);
1667      script->set_line_offset(line_offset);
1668      script->set_column_offset(column_offset);
1669    }
1670    script->set_origin_options(resource_options);
1671    if (!source_map_url.is_null()) {
1672      script->set_source_mapping_url(*source_map_url);
1673    }
1674
1675    // Compile the function and add it to the cache.
1676    Zone zone(isolate->allocator());
1677    ParseInfo parse_info(&zone, script);
1678    CompilationInfo info(&parse_info, Handle<JSFunction>::null());
1679    if (is_module) {
1680      parse_info.set_module();
1681    } else {
1682      parse_info.set_global();
1683    }
1684    if (compile_options != ScriptCompiler::kNoCompileOptions) {
1685      parse_info.set_cached_data(cached_data);
1686    }
1687    parse_info.set_compile_options(compile_options);
1688    parse_info.set_extension(extension);
1689    parse_info.set_context(context);
1690    if (FLAG_serialize_toplevel &&
1691        compile_options == ScriptCompiler::kProduceCodeCache) {
1692      info.PrepareForSerializing();
1693    }
1694
1695    parse_info.set_language_mode(
1696        static_cast<LanguageMode>(parse_info.language_mode() | language_mode));
1697    result = CompileToplevel(&info);
1698    if (extension == NULL && !result.is_null()) {
1699      compilation_cache->PutScript(source, context, language_mode, result);
1700      if (FLAG_serialize_toplevel &&
1701          compile_options == ScriptCompiler::kProduceCodeCache) {
1702        HistogramTimerScope histogram_timer(
1703            isolate->counters()->compile_serialize());
1704        RuntimeCallTimerScope runtimeTimer(isolate,
1705                                           &RuntimeCallStats::CompileSerialize);
1706        TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
1707            isolate, &tracing::TraceEventStatsTable::CompileSerialize);
1708        *cached_data = CodeSerializer::Serialize(isolate, result, source);
1709        if (FLAG_profile_deserialization) {
1710          PrintF("[Compiling and serializing took %0.3f ms]\n",
1711                 timer.Elapsed().InMillisecondsF());
1712        }
1713      }
1714    }
1715
1716    if (result.is_null()) {
1717      isolate->ReportPendingMessages();
1718    } else {
1719      isolate->debug()->OnAfterCompile(script);
1720    }
1721  } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
1722    result->ResetForNewContext(isolate->heap()->global_ic_age());
1723  }
1724  return result;
1725}
1726
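// Compiles a top-level script whose source has been streamed and parsed ahead
// of time; |parse_info| is supplied by the caller rather than being built
// here. The counters are still updated locally (see the TODO below).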
1727Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForStreamedScript(
1728    Handle<Script> script, ParseInfo* parse_info, int source_length) {
1729  Isolate* isolate = script->GetIsolate();
1730  // TODO(titzer): increment the counters in caller.
1731  isolate->counters()->total_load_size()->Increment(source_length);
1732  isolate->counters()->total_compile_size()->Increment(source_length);
1733
1734  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
1735  parse_info->set_language_mode(
1736      static_cast<LanguageMode>(parse_info->language_mode() | language_mode));
1737
1738  CompilationInfo compile_info(parse_info, Handle<JSFunction>::null());
1739
1740  // The source was parsed lazily, so compiling for debugging is not possible.
1741  DCHECK(!compile_info.is_debug());
1742
1743  Handle<SharedFunctionInfo> result = CompileToplevel(&compile_info);
1744  if (!result.is_null()) isolate->debug()->OnAfterCompile(script);
1745  return result;
1746}
1747
1748
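// Finds or creates the SharedFunctionInfo for a function literal encountered
// while compiling its outer function, and decides whether to compile the
// function eagerly or to install the CompileLazy builtin as a placeholder.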
1749Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfo(
1750    FunctionLiteral* literal, Handle<Script> script,
1751    CompilationInfo* outer_info) {
1752  // Precondition: code has been parsed and scopes have been analyzed.
1753  Isolate* isolate = outer_info->isolate();
1754  MaybeHandle<SharedFunctionInfo> maybe_existing;
1755
1756  // Find any previously allocated shared function info for the given literal.
1757  if (outer_info->shared_info()->never_compiled()) {
1758    // On the first compile, there are no existing shared function infos for
1759    // inner functions yet, so do not try to find them. All bets are off for
1760    // live edit though.
1761    SLOW_DCHECK(script->FindSharedFunctionInfo(literal).is_null() ||
1762                isolate->debug()->live_edit_enabled());
1763  } else {
1764    maybe_existing = script->FindSharedFunctionInfo(literal);
1765  }
1766
1767  // If we found an existing shared function info with any sort of code
1768  // attached, don't worry about compiling and simply return it. Otherwise,
1769  // continue below to decide whether to eagerly compile.
1770  // Note that we also carry on if we are compiling eagerly to obtain code
1771  // for debugging, unless we already have code with debug break slots.
1772  Handle<SharedFunctionInfo> existing;
1773  if (maybe_existing.ToHandle(&existing)) {
1774    DCHECK(!existing->is_toplevel());
1775    if (existing->HasBaselineCode() || existing->HasBytecodeArray()) {
1776      if (!outer_info->is_debug() || existing->HasDebugCode()) {
1777        return existing;
1778      }
1779    }
1780  }
1781
1782  // Allocate a shared function info object.
1783  Handle<SharedFunctionInfo> result;
1784  if (!maybe_existing.ToHandle(&result)) {
1785    result = NewSharedFunctionInfoForLiteral(isolate, literal, script);
1786    result->set_is_toplevel(false);
1787
1788    // If the outer function has been compiled before, we cannot be sure that
1789    // the shared function info for this function literal has been created for
1790    // the first time. It may have already been compiled previously.
1791    result->set_never_compiled(outer_info->shared_info()->never_compiled());
1792  }
1793
1794  Zone zone(isolate->allocator());
1795  ParseInfo parse_info(&zone, script);
1796  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
1797  parse_info.set_literal(literal);
1798  parse_info.set_shared_info(result);
1799  parse_info.set_language_mode(literal->scope()->language_mode());
1800  if (outer_info->will_serialize()) info.PrepareForSerializing();
1801  if (outer_info->is_debug()) info.MarkAsDebug();
1802
1803  // Determine if the function can be lazily compiled. This is necessary to
1804  // allow some of our builtin JS files to be lazily compiled. These
1805  // builtins cannot be handled lazily by the parser, since we have to know
1806  // if a function uses the special natives syntax, which is something the
1807  // parser records.
1808  // If the debugger requests compilation for break points, we cannot be
1809  // aggressive about lazy compilation, because it might trigger compilation
1810  // of functions without an outer context when setting a breakpoint through
1811  // Debug::FindSharedFunctionInfoInScript.
1812  bool allow_lazy = literal->AllowsLazyCompilation() && !info.is_debug();
1813  bool lazy = FLAG_lazy && allow_lazy && !literal->should_eager_compile();
1814
1815  // Consider compiling eagerly when targeting the code cache.
1816  lazy &= !(FLAG_serialize_eager && info.will_serialize());
1817
1818  // Consider compiling eagerly when compiling bytecode for Ignition.
1819  lazy &=
1820      !(FLAG_ignition && FLAG_ignition_eager && !isolate->serializer_enabled());
1821
1822  // Generate code.
1823  TimerEventScope<TimerEventCompileCode> timer(isolate);
1824  RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::CompileCode);
1825  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
1826      isolate, &tracing::TraceEventStatsTable::CompileCode);
1827
1828  // Create a canonical handle scope if compiling Ignition bytecode. This is
1829  // required by the constant array builder to de-duplicate common objects
1830  // without dereferencing handles.
1831  std::unique_ptr<CanonicalHandleScope> canonical;
1832  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info.isolate()));
1833
1834  if (lazy) {
1835    info.SetCode(isolate->builtins()->CompileLazy());
1836  } else if (Renumber(info.parse_info()) && GenerateUnoptimizedCode(&info)) {
1837    // Code generation will ensure that the feedback vector is present and
1838    // appropriately sized.
1839    DCHECK(!info.code().is_null());
1840    if (literal->should_eager_compile() &&
1841        literal->should_be_used_once_hint()) {
1842      info.code()->MarkToBeExecutedOnce(isolate);
1843    }
1844    // Update the shared function info with the scope info.
1845    InstallSharedScopeInfo(&info, result);
1846    // Install compilation result on the shared function info.
1847    InstallSharedCompilationResult(&info, result);
1848  } else {
1849    return Handle<SharedFunctionInfo>::null();
1850  }
1851
1852  if (maybe_existing.is_null()) {
1853    RecordFunctionCompilation(CodeEventListener::FUNCTION_TAG, &info);
1854  }
1855
1856  return result;
1857}
1858
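// Builds a SharedFunctionInfo for a native function provided by an extension:
// the extension's function template is instantiated, and its code, construct
// stub, feedback metadata and function data are copied onto a fresh shared
// function info.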
1859Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForNative(
1860    v8::Extension* extension, Handle<String> name) {
1861  Isolate* isolate = name->GetIsolate();
1862  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);
1863
1864  // Compute the function template for the native function.
1865  v8::Local<v8::FunctionTemplate> fun_template =
1866      extension->GetNativeFunctionTemplate(v8_isolate,
1867                                           v8::Utils::ToLocal(name));
1868  DCHECK(!fun_template.IsEmpty());
1869
1870  // Instantiate the function and create a shared function info from it.
1871  Handle<JSFunction> fun = Handle<JSFunction>::cast(Utils::OpenHandle(
1872      *fun_template->GetFunction(v8_isolate->GetCurrentContext())
1873           .ToLocalChecked()));
1874  Handle<Code> code = Handle<Code>(fun->shared()->code());
1875  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
1876  Handle<SharedFunctionInfo> shared = isolate->factory()->NewSharedFunctionInfo(
1877      name, fun->shared()->num_literals(), FunctionKind::kNormalFunction, code,
1878      Handle<ScopeInfo>(fun->shared()->scope_info()));
1879  shared->SetConstructStub(*construct_stub);
1880  shared->set_feedback_metadata(fun->shared()->feedback_metadata());
1881
1882  // Copy the function data to the shared function info.
1883  shared->set_function_data(fun->shared()->function_data());
1884  int parameters = fun->shared()->internal_formal_parameter_count();
1885  shared->set_internal_formal_parameter_count(parameters);
1886
1887  return shared;
1888}
1889
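// Requests optimized code for on-stack replacement at the given AST id,
// compiling synchronously (NOT_CONCURRENT) against the provided unoptimized
// frame.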
1890MaybeHandle<Code> Compiler::GetOptimizedCodeForOSR(Handle<JSFunction> function,
1891                                                   BailoutId osr_ast_id,
1892                                                   JavaScriptFrame* osr_frame) {
1893  DCHECK(!osr_ast_id.IsNone());
1894  DCHECK_NOT_NULL(osr_frame);
1895  return GetOptimizedCode(function, NOT_CONCURRENT, osr_ast_id, osr_frame);
1896}
1897
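// Finalizes a (possibly concurrent) optimization job: on success the new code
// is recorded, inserted into the optimized code map if not already present,
// and installed on the closure; otherwise the closure falls back to the
// shared (unoptimized) code.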
1898void Compiler::FinalizeCompilationJob(CompilationJob* raw_job) {
1899  // Take ownership of compilation job.  Deleting job also tears down the zone.
1900  std::unique_ptr<CompilationJob> job(raw_job);
1901  CompilationInfo* info = job->info();
1902  Isolate* isolate = info->isolate();
1903
1904  VMState<COMPILER> state(isolate);
1905  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
1906  RuntimeCallTimerScope runtimeTimer(isolate,
1907                                     &RuntimeCallStats::RecompileSynchronous);
1908  TRACE_EVENT_RUNTIME_CALL_STATS_TRACING_SCOPED(
1909      isolate, &tracing::TraceEventStatsTable::RecompileSynchronous);
1910
1911  Handle<SharedFunctionInfo> shared = info->shared_info();
1912  shared->code()->set_profiler_ticks(0);
1913
1914  DCHECK(!shared->HasDebugInfo());
1915
1916  // 1) Optimization on the concurrent thread may have failed.
1917  // 2) The function may have already been optimized by OSR.  Simply continue.
1918  //    Except when OSR already disabled optimization for some reason.
1919  // 3) The code may have already been invalidated due to dependency change.
1920  // 4) Code generation may have failed.
1921  if (job->state() == CompilationJob::State::kReadyToFinalize) {
1922    if (shared->optimization_disabled()) {
1923      job->RetryOptimization(kOptimizationDisabled);
1924    } else if (info->dependencies()->HasAborted()) {
1925      job->RetryOptimization(kBailedOutDueToDependencyChange);
1926    } else if (job->FinalizeJob() == CompilationJob::SUCCEEDED) {
1927      job->RecordOptimizationStats();
1928      RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
1929      if (shared->SearchOptimizedCodeMap(info->context()->native_context(),
1930                                         info->osr_ast_id()).code == nullptr) {
1931        InsertCodeIntoOptimizedCodeMap(info);
1932      }
1933      if (FLAG_trace_opt) {
1934        PrintF("[completed optimizing ");
1935        info->closure()->ShortPrint();
1936        PrintF("]\n");
1937      }
1938      info->closure()->ReplaceCode(*info->code());
1939      return;
1940    }
1941  }
1942
1943  DCHECK(job->state() == CompilationJob::State::kFailed);
1944  if (FLAG_trace_opt) {
1945    PrintF("[aborted optimizing ");
1946    info->closure()->ShortPrint();
1947    PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
1948  }
1949  info->closure()->ReplaceCode(shared->code());
1950}
1951
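// Post-processing after a new JSFunction has been instantiated from a shared
// function info: optionally marks it for optimization (--always-opt), installs
// cached optimized code and literals from the optimized code map if present,
// and otherwise ensures a literals array exists for already compiled functions.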
1952void Compiler::PostInstantiation(Handle<JSFunction> function,
1953                                 PretenureFlag pretenure) {
1954  Handle<SharedFunctionInfo> shared(function->shared());
1955
1956  if (FLAG_always_opt && shared->allows_lazy_compilation()) {
1957    function->MarkForOptimization();
1958  }
1959
1960  CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
1961      function->context()->native_context(), BailoutId::None());
1962  if (cached.code != nullptr) {
1963    // Caching of optimized code enabled and optimized code found.
1964    DCHECK(!cached.code->marked_for_deoptimization());
1965    DCHECK(function->shared()->is_compiled());
1966    function->ReplaceCode(cached.code);
1967  }
1968
1969  if (cached.literals != nullptr) {
1970    DCHECK(shared->is_compiled());
1971    function->set_literals(cached.literals);
1972  } else if (shared->is_compiled()) {
1973    // TODO(mvstanton): pass pretenure flag to EnsureLiterals.
1974    JSFunction::EnsureLiterals(function);
1975  }
1976}
1977
1978}  // namespace internal
1979}  // namespace v8
1980