// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_H_
#define V8_COMPILER_H_

#include "src/allocation.h"
#include "src/ast.h"
#include "src/bailout-reason.h"
#include "src/zone.h"

namespace v8 {
namespace internal {

class AstValueFactory;
class HydrogenCodeStub;

// ParseRestriction is used to restrict the set of valid statements in a
// unit of compilation.  Restriction violations cause a syntax error.
enum ParseRestriction {
  NO_PARSE_RESTRICTION,         // All expressions are allowed.
  ONLY_SINGLE_FUNCTION_LITERAL  // Only a single FunctionLiteral expression.
};
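
// Illustrative use (sketch only; the variable names are made up, the call
// mirrors the declaration of Compiler::GetFunctionFromEval further down in
// this header): passing ONLY_SINGLE_FUNCTION_LITERAL makes anything but a
// single function literal a syntax error.
//
//   Compiler::GetFunctionFromEval(source, outer_info, context, STRICT,
//                                 ONLY_SINGLE_FUNCTION_LITERAL, position);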

struct OffsetRange {
  OffsetRange(int from, int to) : from(from), to(to) {}
  int from;
  int to;
};


class ScriptData {
 public:
  ScriptData(const byte* data, int length);
  ~ScriptData() {
    if (owns_data_) DeleteArray(data_);
  }

  const byte* data() const { return data_; }
  int length() const { return length_; }

  void AcquireDataOwnership() {
    DCHECK(!owns_data_);
    owns_data_ = true;
  }

  void ReleaseDataOwnership() {
    DCHECK(owns_data_);
    owns_data_ = false;
  }

 private:
  bool owns_data_;
  const byte* data_;
  int length_;

  DISALLOW_COPY_AND_ASSIGN(ScriptData);
};
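
// Ownership sketch, based only on the interface above (the buffer name is
// hypothetical): if the constructor did not take ownership of the bytes, the
// caller can hand them over explicitly so the destructor frees them.
//
//   ScriptData* cached = new ScriptData(buffer, length);
//   cached->AcquireDataOwnership();  // ~ScriptData() now calls DeleteArray(buffer).
//   ...
//   delete cached;                   // Frees |buffer| as well.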

// CompilationInfo encapsulates some information known at compile time.  It
// is constructed based on the resources available at compile time.
class CompilationInfo {
 public:
  // Various configuration flags for a compilation, as well as some properties
  // of the compiled code produced by a compilation.
  enum Flag {
    kLazy = 1 << 0,
    kEval = 1 << 1,
    kGlobal = 1 << 2,
    kStrictMode = 1 << 3,
    kThisHasUses = 1 << 4,
    kNative = 1 << 5,
    kDeferredCalling = 1 << 6,
    kNonDeferredCalling = 1 << 7,
    kSavesCallerDoubles = 1 << 8,
    kRequiresFrame = 1 << 9,
    kMustNotHaveEagerFrame = 1 << 10,
    kDeoptimizationSupport = 1 << 11,
    kDebug = 1 << 12,
    kCompilingForDebugging = 1 << 13,
    kParseRestriction = 1 << 14,
    kSerializing = 1 << 15,
    kContextSpecializing = 1 << 16,
    kInliningEnabled = 1 << 17,
    kTypingEnabled = 1 << 18,
    kDisableFutureOptimization = 1 << 19
  };
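
  // The flag values are disjoint bits, so any combination fits in the single
  // unsigned word flags_ below; e.g. a strict-mode eval compilation carries
  // (kEval | kStrictMode) == (1 << 1) | (1 << 3) == 0xA.  See the private
  // SetFlag()/GetFlag() helpers for the accessors.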

  CompilationInfo(Handle<JSFunction> closure, Zone* zone);
  CompilationInfo(Isolate* isolate, Zone* zone);
  virtual ~CompilationInfo();

  Isolate* isolate() const {
    return isolate_;
  }
  Zone* zone() { return zone_; }
  bool is_osr() const { return !osr_ast_id_.IsNone(); }
  bool is_lazy() const { return GetFlag(kLazy); }
  bool is_eval() const { return GetFlag(kEval); }
  bool is_global() const { return GetFlag(kGlobal); }
  StrictMode strict_mode() const {
    return GetFlag(kStrictMode) ? STRICT : SLOPPY;
  }
  FunctionLiteral* function() const { return function_; }
  Scope* scope() const { return scope_; }
  Scope* global_scope() const { return global_scope_; }
  Handle<Code> code() const { return code_; }
  Handle<JSFunction> closure() const { return closure_; }
  Handle<SharedFunctionInfo> shared_info() const { return shared_info_; }
  Handle<Script> script() const { return script_; }
  void set_script(Handle<Script> script) { script_ = script; }
  HydrogenCodeStub* code_stub() const { return code_stub_; }
  v8::Extension* extension() const { return extension_; }
  ScriptData** cached_data() const { return cached_data_; }
  ScriptCompiler::CompileOptions compile_options() const {
    return compile_options_;
  }
  ScriptCompiler::ExternalSourceStream* source_stream() const {
    return source_stream_;
  }
  ScriptCompiler::StreamedSource::Encoding source_stream_encoding() const {
    return source_stream_encoding_;
  }
  Handle<Context> context() const { return context_; }
  BailoutId osr_ast_id() const { return osr_ast_id_; }
  Handle<Code> unoptimized_code() const { return unoptimized_code_; }
  int opt_count() const { return opt_count_; }
  int num_parameters() const;
  int num_heap_slots() const;
  Code::Flags flags() const;

  void MarkAsEval() {
    DCHECK(!is_lazy());
    SetFlag(kEval);
  }

  void MarkAsGlobal() {
    DCHECK(!is_lazy());
    SetFlag(kGlobal);
  }

  void set_parameter_count(int parameter_count) {
    DCHECK(IsStub());
    parameter_count_ = parameter_count;
  }

  void set_this_has_uses(bool has_uses) {
    SetFlag(kThisHasUses, has_uses);
  }

  bool this_has_uses() { return GetFlag(kThisHasUses); }

  void SetStrictMode(StrictMode strict_mode) {
    SetFlag(kStrictMode, strict_mode == STRICT);
  }

  void MarkAsNative() { SetFlag(kNative); }

  bool is_native() const { return GetFlag(kNative); }

  bool is_calling() const {
    return GetFlag(kDeferredCalling) || GetFlag(kNonDeferredCalling);
  }

  void MarkAsDeferredCalling() { SetFlag(kDeferredCalling); }

  bool is_deferred_calling() const { return GetFlag(kDeferredCalling); }

  void MarkAsNonDeferredCalling() { SetFlag(kNonDeferredCalling); }

  bool is_non_deferred_calling() const { return GetFlag(kNonDeferredCalling); }

  void MarkAsSavesCallerDoubles() { SetFlag(kSavesCallerDoubles); }

  bool saves_caller_doubles() const { return GetFlag(kSavesCallerDoubles); }

  void MarkAsRequiresFrame() { SetFlag(kRequiresFrame); }

  bool requires_frame() const { return GetFlag(kRequiresFrame); }

  void MarkMustNotHaveEagerFrame() { SetFlag(kMustNotHaveEagerFrame); }

  bool GetMustNotHaveEagerFrame() const {
    return GetFlag(kMustNotHaveEagerFrame);
  }

  void MarkAsDebug() { SetFlag(kDebug); }

  bool is_debug() const { return GetFlag(kDebug); }

  void PrepareForSerializing() { SetFlag(kSerializing); }

  bool will_serialize() const { return GetFlag(kSerializing); }

  void MarkAsContextSpecializing() { SetFlag(kContextSpecializing); }

  bool is_context_specializing() const { return GetFlag(kContextSpecializing); }

  void MarkAsInliningEnabled() { SetFlag(kInliningEnabled); }

  void MarkAsInliningDisabled() { SetFlag(kInliningEnabled, false); }

  bool is_inlining_enabled() const { return GetFlag(kInliningEnabled); }

  void MarkAsTypingEnabled() { SetFlag(kTypingEnabled); }

  bool is_typing_enabled() const { return GetFlag(kTypingEnabled); }

  bool IsCodePreAgingActive() const {
    return FLAG_optimize_for_size && FLAG_age_code && !will_serialize() &&
           !is_debug();
  }

  void SetParseRestriction(ParseRestriction restriction) {
    SetFlag(kParseRestriction, restriction != NO_PARSE_RESTRICTION);
  }

  ParseRestriction parse_restriction() const {
    return GetFlag(kParseRestriction) ? ONLY_SINGLE_FUNCTION_LITERAL
                                      : NO_PARSE_RESTRICTION;
  }

  void SetFunction(FunctionLiteral* literal) {
    DCHECK(function_ == NULL);
    function_ = literal;
  }
  void PrepareForCompilation(Scope* scope);
  void SetGlobalScope(Scope* global_scope) {
    DCHECK(global_scope_ == NULL);
    global_scope_ = global_scope;
  }
  Handle<TypeFeedbackVector> feedback_vector() const {
    return feedback_vector_;
  }
  void SetCode(Handle<Code> code) { code_ = code; }
  void SetExtension(v8::Extension* extension) {
    DCHECK(!is_lazy());
    extension_ = extension;
  }
  void SetCachedData(ScriptData** cached_data,
                     ScriptCompiler::CompileOptions compile_options) {
    compile_options_ = compile_options;
    if (compile_options == ScriptCompiler::kNoCompileOptions) {
      cached_data_ = NULL;
    } else {
      DCHECK(!is_lazy());
      cached_data_ = cached_data;
    }
  }
  void SetContext(Handle<Context> context) {
    context_ = context;
  }

  void MarkCompilingForDebugging() { SetFlag(kCompilingForDebugging); }
  bool IsCompilingForDebugging() { return GetFlag(kCompilingForDebugging); }
  void MarkNonOptimizable() {
    SetMode(CompilationInfo::NONOPT);
  }

  bool ShouldTrapOnDeopt() const {
    return (FLAG_trap_on_deopt && IsOptimizing()) ||
        (FLAG_trap_on_stub_deopt && IsStub());
  }

  bool has_global_object() const {
    return !closure().is_null() &&
        (closure()->context()->global_object() != NULL);
  }

  GlobalObject* global_object() const {
    return has_global_object() ? closure()->context()->global_object() : NULL;
  }

  // Accessors for the different compilation modes.
  bool IsOptimizing() const { return mode_ == OPTIMIZE; }
  bool IsOptimizable() const { return mode_ == BASE; }
  bool IsStub() const { return mode_ == STUB; }
  void SetOptimizing(BailoutId osr_ast_id, Handle<Code> unoptimized) {
    DCHECK(!shared_info_.is_null());
    SetMode(OPTIMIZE);
    osr_ast_id_ = osr_ast_id;
    unoptimized_code_ = unoptimized;
    optimization_id_ = isolate()->NextOptimizationId();
  }

  // Deoptimization support.
  bool HasDeoptimizationSupport() const {
    return GetFlag(kDeoptimizationSupport);
  }
  void EnableDeoptimizationSupport() {
    DCHECK(IsOptimizable());
    SetFlag(kDeoptimizationSupport);
  }

  // Determines whether or not to insert a self-optimization header.
  bool ShouldSelfOptimize();

  void set_deferred_handles(DeferredHandles* deferred_handles) {
    DCHECK(deferred_handles_ == NULL);
    deferred_handles_ = deferred_handles;
  }

  ZoneList<Handle<HeapObject> >* dependencies(
      DependentCode::DependencyGroup group) {
    if (dependencies_[group] == NULL) {
      dependencies_[group] = new(zone_) ZoneList<Handle<HeapObject> >(2, zone_);
    }
    return dependencies_[group];
  }

  void CommitDependencies(Handle<Code> code);

  void RollbackDependencies();

  void SaveHandles() {
    SaveHandle(&closure_);
    SaveHandle(&shared_info_);
    SaveHandle(&context_);
    SaveHandle(&script_);
    SaveHandle(&unoptimized_code_);
  }

  void AbortOptimization(BailoutReason reason) {
    // Keep the first recorded bailout reason; later calls do not overwrite it.
    if (bailout_reason_ == kNoReason) bailout_reason_ = reason;
    SetFlag(kDisableFutureOptimization);
  }

  void RetryOptimization(BailoutReason reason) {
    if (bailout_reason_ == kNoReason) bailout_reason_ = reason;
  }

  BailoutReason bailout_reason() const { return bailout_reason_; }

  int prologue_offset() const {
    DCHECK_NE(Code::kPrologueOffsetNotSet, prologue_offset_);
    return prologue_offset_;
  }

  void set_prologue_offset(int prologue_offset) {
    DCHECK_EQ(Code::kPrologueOffsetNotSet, prologue_offset_);
    prologue_offset_ = prologue_offset;
  }

  // Adds offset range [from, to) where fp register does not point
  // to the current frame base. Used in CPU profiler to detect stack
  // samples where top frame is not set up.
  inline void AddNoFrameRange(int from, int to) {
    if (no_frame_ranges_) no_frame_ranges_->Add(OffsetRange(from, to));
  }

  List<OffsetRange>* ReleaseNoFrameRanges() {
    List<OffsetRange>* result = no_frame_ranges_;
    no_frame_ranges_ = NULL;
    return result;
  }

  Handle<Foreign> object_wrapper() {
    if (object_wrapper_.is_null()) {
      object_wrapper_ =
          isolate()->factory()->NewForeign(reinterpret_cast<Address>(this));
    }
    return object_wrapper_;
  }

  void AbortDueToDependencyChange() {
    DCHECK(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
    aborted_due_to_dependency_change_ = true;
  }

  bool HasAbortedDueToDependencyChange() const {
    DCHECK(!OptimizingCompilerThread::IsOptimizerThread(isolate()));
    return aborted_due_to_dependency_change_;
  }

  bool HasSameOsrEntry(Handle<JSFunction> function, BailoutId osr_ast_id) {
    return osr_ast_id_ == osr_ast_id && function.is_identical_to(closure_);
  }

  int optimization_id() const { return optimization_id_; }

  AstValueFactory* ast_value_factory() const { return ast_value_factory_; }
  void SetAstValueFactory(AstValueFactory* ast_value_factory,
                          bool owned = true) {
    ast_value_factory_ = ast_value_factory;
    ast_value_factory_owned_ = owned;
  }

  AstNode::IdGen* ast_node_id_gen() { return &ast_node_id_gen_; }

 protected:
  CompilationInfo(Handle<Script> script,
                  Zone* zone);
  CompilationInfo(Handle<SharedFunctionInfo> shared_info,
                  Zone* zone);
  CompilationInfo(HydrogenCodeStub* stub,
                  Isolate* isolate,
                  Zone* zone);
  CompilationInfo(ScriptCompiler::ExternalSourceStream* source_stream,
                  ScriptCompiler::StreamedSource::Encoding encoding,
                  Isolate* isolate, Zone* zone);


 private:
  Isolate* isolate_;

  // Compilation mode.
  // BASE is generated by the full codegen, optionally prepared for bailouts.
  // OPTIMIZE is optimized code generated by the Hydrogen-based backend.
  // NONOPT is generated by the full codegen and is not prepared for
  //   recompilation/bailouts.  These functions are never recompiled.
  enum Mode {
    BASE,
    OPTIMIZE,
    NONOPT,
    STUB
  };

  void Initialize(Isolate* isolate, Mode mode, Zone* zone);

  void SetMode(Mode mode) {
    mode_ = mode;
  }

  void SetFlag(Flag flag) { flags_ |= flag; }

  void SetFlag(Flag flag, bool value) {
    flags_ = value ? flags_ | flag : flags_ & ~flag;
  }

  bool GetFlag(Flag flag) const { return (flags_ & flag) != 0; }

  unsigned flags_;

  // Fields filled in by the compilation pipeline.
  // AST filled in by the parser.
  FunctionLiteral* function_;
  // The scope of the function literal as a convenience.  Set to indicate
  // that scopes have been analyzed.
  Scope* scope_;
  // The global scope provided as a convenience.
  Scope* global_scope_;
  // For compiled stubs, the stub object
  HydrogenCodeStub* code_stub_;
  // The compiled code.
  Handle<Code> code_;

  // Possible initial inputs to the compilation process.
  Handle<JSFunction> closure_;
  Handle<SharedFunctionInfo> shared_info_;
  Handle<Script> script_;
  ScriptCompiler::ExternalSourceStream* source_stream_;  // Not owned.
  ScriptCompiler::StreamedSource::Encoding source_stream_encoding_;

  // Fields possibly needed for eager compilation, NULL by default.
  v8::Extension* extension_;
  ScriptData** cached_data_;
  ScriptCompiler::CompileOptions compile_options_;

  // The context of the caller for eval code, and the global context for a
  // global script. Will be a null handle otherwise.
  Handle<Context> context_;

  // Used by codegen, ultimately kept rooted by the SharedFunctionInfo.
  Handle<TypeFeedbackVector> feedback_vector_;

  // Compilation mode flag and whether deoptimization is allowed.
  Mode mode_;
  BailoutId osr_ast_id_;
  // The unoptimized code we patched for OSR may not be the shared code
  // afterwards, since we may need to compile it again to include deoptimization
  // data.  Keep track of which code we patched.
  Handle<Code> unoptimized_code_;

  // The zone from which the compilation pipeline working on this
  // CompilationInfo allocates.
  Zone* zone_;

  DeferredHandles* deferred_handles_;

  ZoneList<Handle<HeapObject> >* dependencies_[DependentCode::kGroupCount];

  // Re-creates the handle in the current handle scope; used via SaveHandles()
  // together with CompilationHandleScope so the handles remain valid outside
  // the original HandleScope (e.g. for concurrent recompilation).
  template<typename T>
  void SaveHandle(Handle<T> *object) {
    if (!object->is_null()) {
      Handle<T> handle(*(*object));
      *object = handle;
    }
  }

  BailoutReason bailout_reason_;

  int prologue_offset_;

  List<OffsetRange>* no_frame_ranges_;

  // A copy of shared_info()->opt_count() to avoid handle deref
  // during graph optimization.
  int opt_count_;

  // Number of parameters used for compilation of stubs that require arguments.
  int parameter_count_;

  Handle<Foreign> object_wrapper_;

  int optimization_id_;

  AstValueFactory* ast_value_factory_;
  bool ast_value_factory_owned_;
  AstNode::IdGen ast_node_id_gen_;

  // This flag is used by the main thread to track whether this compilation
  // should be abandoned due to dependency change.
  bool aborted_due_to_dependency_change_;

  DISALLOW_COPY_AND_ASSIGN(CompilationInfo);
};


// Exactly like a CompilationInfo, except also creates and enters a
// Zone on construction and deallocates it on exit.
class CompilationInfoWithZone : public CompilationInfo {
 public:
  explicit CompilationInfoWithZone(Handle<Script> script)
      : CompilationInfo(script, &zone_),
        zone_(script->GetIsolate()) {}
  explicit CompilationInfoWithZone(Handle<SharedFunctionInfo> shared_info)
      : CompilationInfo(shared_info, &zone_),
        zone_(shared_info->GetIsolate()) {}
  explicit CompilationInfoWithZone(Handle<JSFunction> closure)
      : CompilationInfo(closure, &zone_),
        zone_(closure->GetIsolate()) {}
  CompilationInfoWithZone(HydrogenCodeStub* stub, Isolate* isolate)
      : CompilationInfo(stub, isolate, &zone_),
        zone_(isolate) {}
  CompilationInfoWithZone(ScriptCompiler::ExternalSourceStream* stream,
                          ScriptCompiler::StreamedSource::Encoding encoding,
                          Isolate* isolate)
      : CompilationInfo(stream, encoding, isolate, &zone_), zone_(isolate) {}

  // Virtual destructor because a CompilationInfoWithZone has to exit the
  // zone scope and get rid of dependent maps even when the destructor is
  // called when cast as a CompilationInfo.
  virtual ~CompilationInfoWithZone() {
    RollbackDependencies();
  }

 private:
  Zone zone_;
};
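
// Illustrative sketch (hypothetical call site): the zone is owned by the info
// object, so callers do not manage a separate Zone.
//
//   CompilationInfoWithZone info(function);  // function: Handle<JSFunction>
//   if (Compiler::EnsureDeoptimizationSupport(&info)) {
//     // Baseline code with deoptimization support should now be available.
//   }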


// A wrapper around a CompilationInfo that detaches the Handles from
// the underlying DeferredHandleScope and stores them in info_ on
// destruction.
class CompilationHandleScope BASE_EMBEDDED {
 public:
  explicit CompilationHandleScope(CompilationInfo* info)
      : deferred_(info->isolate()), info_(info) {}
  ~CompilationHandleScope() {
    info_->set_deferred_handles(deferred_.Detach());
  }

 private:
  DeferredHandleScope deferred_;
  CompilationInfo* info_;
};
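
// Typical shape of a use (sketch only):
//
//   {
//     CompilationHandleScope handle_scope(info);
//     // Handles created here outlive the scope: on destruction they are
//     // detached into a DeferredHandles object stored on |info|.
//   }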


class HGraph;
class HOptimizedGraphBuilder;
class LChunk;

// A helper class that calls the three compilation phases in
// Crankshaft and keeps track of its state.  The three phases
// CreateGraph, OptimizeGraph and GenerateCode can either fail,
// bail out to the full code generator or succeed.  Apart from
// their return value, the status of the phase last run can be checked
// using last_status().
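//
// Rough driving sketch (illustrative only; a caller might sequence the
// phases like this, checking the status after each step):
//
//   OptimizedCompileJob* job = new (info->zone()) OptimizedCompileJob(info);
//   if (job->CreateGraph() == OptimizedCompileJob::SUCCEEDED &&
//       job->OptimizeGraph() == OptimizedCompileJob::SUCCEEDED &&
//       job->GenerateCode() == OptimizedCompileJob::SUCCEEDED) {
//     // Install the optimized code.
//   } else if (job->last_status() == OptimizedCompileJob::BAILED_OUT) {
//     // Fall back to the code produced by the full code generator.
//   }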
class OptimizedCompileJob : public ZoneObject {
 public:
  explicit OptimizedCompileJob(CompilationInfo* info)
      : info_(info),
        graph_builder_(NULL),
        graph_(NULL),
        chunk_(NULL),
        last_status_(FAILED),
        awaiting_install_(false) { }

  enum Status {
    FAILED, BAILED_OUT, SUCCEEDED
  };

  MUST_USE_RESULT Status CreateGraph();
  MUST_USE_RESULT Status OptimizeGraph();
  MUST_USE_RESULT Status GenerateCode();

  Status last_status() const { return last_status_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }

  Status RetryOptimization(BailoutReason reason) {
    info_->RetryOptimization(reason);
    return SetLastStatus(BAILED_OUT);
  }

  Status AbortOptimization(BailoutReason reason) {
    info_->AbortOptimization(reason);
    return SetLastStatus(BAILED_OUT);
  }

  void WaitForInstall() {
    DCHECK(info_->is_osr());
    awaiting_install_ = true;
  }

  bool IsWaitingForInstall() { return awaiting_install_; }

 private:
  CompilationInfo* info_;
  HOptimizedGraphBuilder* graph_builder_;
  HGraph* graph_;
  LChunk* chunk_;
  base::TimeDelta time_taken_to_create_graph_;
  base::TimeDelta time_taken_to_optimize_;
  base::TimeDelta time_taken_to_codegen_;
  Status last_status_;
  bool awaiting_install_;

  MUST_USE_RESULT Status SetLastStatus(Status status) {
    last_status_ = status;
    return last_status_;
  }
  void RecordOptimizationStats();

  struct Timer {
    Timer(OptimizedCompileJob* job, base::TimeDelta* location)
        : job_(job), location_(location) {
      DCHECK(location_ != NULL);
      timer_.Start();
    }

    ~Timer() {
      *location_ += timer_.Elapsed();
    }

    OptimizedCompileJob* job_;
    base::ElapsedTimer timer_;
    base::TimeDelta* location_;
  };
};


// The V8 compiler
//
// General strategy: Source code is translated into an anonymous function
// without parameters which can then be executed. If the source code contains
// other functions, they will be compiled and allocated as part of the
// compilation of the source code.

// Please note this interface returns shared function infos.  This means you
// need to call Factory::NewFunctionFromSharedFunctionInfo before you have a
// real function with a context.
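//
// Illustrative sketch (local names are made up; see the declarations below
// for the exact signatures):
//
//   Handle<SharedFunctionInfo> shared = Compiler::CompileScript(
//       source, script_name, 0, 0, false, context, NULL, NULL,
//       ScriptCompiler::kNoCompileOptions, NOT_NATIVES_CODE);
//   Handle<JSFunction> fun =
//       isolate->factory()->NewFunctionFromSharedFunctionInfo(shared, context);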

class Compiler : public AllStatic {
 public:
  MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
      Handle<JSFunction> function);
  MUST_USE_RESULT static MaybeHandle<Code> GetLazyCode(
      Handle<JSFunction> function);
  MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCode(
      Handle<SharedFunctionInfo> shared);
  MUST_USE_RESULT static MaybeHandle<Code> GetDebugCode(
      Handle<JSFunction> function);

  static bool EnsureCompiled(Handle<JSFunction> function,
                             ClearExceptionFlag flag);

  static bool EnsureDeoptimizationSupport(CompilationInfo* info);

  static void CompileForLiveEdit(Handle<Script> script);

  // Compile a String source within a context for eval.
  MUST_USE_RESULT static MaybeHandle<JSFunction> GetFunctionFromEval(
      Handle<String> source, Handle<SharedFunctionInfo> outer_info,
      Handle<Context> context, StrictMode strict_mode,
      ParseRestriction restriction, int scope_position);

  // Compile a String source within a context.
  static Handle<SharedFunctionInfo> CompileScript(
      Handle<String> source, Handle<Object> script_name, int line_offset,
      int column_offset, bool is_shared_cross_origin, Handle<Context> context,
      v8::Extension* extension, ScriptData** cached_data,
      ScriptCompiler::CompileOptions compile_options,
      NativesFlag is_natives_code);

  static Handle<SharedFunctionInfo> CompileStreamedScript(CompilationInfo* info,
                                                          int source_length);

  // Create a shared function info object (the code may be lazily compiled).
  static Handle<SharedFunctionInfo> BuildFunctionInfo(FunctionLiteral* node,
                                                      Handle<Script> script,
                                                      CompilationInfo* outer);

  enum ConcurrencyMode { NOT_CONCURRENT, CONCURRENT };

  // Generate and return optimized code or start a concurrent optimization job.
  // In the latter case, return the InOptimizationQueue builtin.  On failure,
  // return the empty handle.
  MUST_USE_RESULT static MaybeHandle<Code> GetOptimizedCode(
      Handle<JSFunction> function,
      Handle<Code> current_code,
      ConcurrencyMode mode,
      BailoutId osr_ast_id = BailoutId::None());
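
  // Illustrative call from a hypothetical call site (sketch only): in the
  // concurrent case the returned code is the InOptimizationQueue builtin, so
  // the caller keeps running the current code until the background job
  // completes.
  //
  //   Handle<Code> code;
  //   if (Compiler::GetOptimizedCode(function, Handle<Code>(function->code()),
  //                                  Compiler::CONCURRENT).ToHandle(&code)) {
  //     function->ReplaceCode(*code);
  //   }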

  // Generate and return code from a previously queued optimization job.
  // On failure, return the empty handle.
  static Handle<Code> GetConcurrentlyOptimizedCode(OptimizedCompileJob* job);

  static bool DebuggerWantsEagerCompilation(
      CompilationInfo* info, bool allow_lazy_without_ctx = false);
};


class CompilationPhase BASE_EMBEDDED {
 public:
  CompilationPhase(const char* name, CompilationInfo* info);
  ~CompilationPhase();

 protected:
  bool ShouldProduceTraceOutput() const;

  const char* name() const { return name_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info()->isolate(); }
  Zone* zone() { return &zone_; }

 private:
  const char* name_;
  CompilationInfo* info_;
  Zone zone_;
  unsigned info_zone_start_allocation_size_;
  base::ElapsedTimer timer_;

  DISALLOW_COPY_AND_ASSIGN(CompilationPhase);
};
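
// Sketch of a derived phase (the name is hypothetical; Hydrogen's phases are
// built on this class in a similar way):
//
//   class MyPhase : public CompilationPhase {
//    public:
//     explicit MyPhase(CompilationInfo* info)
//         : CompilationPhase("V8.MyPhase", info) {}
//     void Run() {
//       // Allocate temporaries from zone(); gate tracing on
//       // ShouldProduceTraceOutput().
//     }
//   };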

} }  // namespace v8::internal

#endif  // V8_COMPILER_H_