1// Copyright 2012 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#ifndef V8_FULL_CODEGEN_FULL_CODEGEN_H_
6#define V8_FULL_CODEGEN_FULL_CODEGEN_H_
7
8#include "src/allocation.h"
9#include "src/assert-scope.h"
10#include "src/ast/ast.h"
11#include "src/ast/scopes.h"
12#include "src/bit-vector.h"
13#include "src/code-factory.h"
14#include "src/code-stubs.h"
15#include "src/codegen.h"
16#include "src/compiler.h"
17#include "src/deoptimizer.h"
18#include "src/globals.h"
19#include "src/objects.h"
20
21namespace v8 {
22namespace internal {
23
24// Forward declarations.
25class JumpPatchSite;
26
27// -----------------------------------------------------------------------------
28// Full code generator.
29
30class FullCodeGenerator: public AstVisitor {
31 public:
  // Construct a generator that emits code for |info| via |masm|.  The
  // constructor only wires up state; code is emitted later (see Generate()).
  FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info)
      : masm_(masm),
        info_(info),
        isolate_(info->isolate()),
        zone_(info->zone()),
        scope_(info->scope()),
        nesting_stack_(NULL),
        loop_depth_(0),
        try_catch_depth_(0),
        operand_stack_depth_(0),
        globals_(NULL),
        context_(NULL),
        // Pre-size one bailout entry per AST node, but only when
        // deoptimization support is requested; otherwise keep it empty.
        bailout_entries_(info->HasDeoptimizationSupport()
                             ? info->literal()->ast_node_count()
                             : 0,
                         info->zone()),
        back_edges_(2, info->zone()),
        handler_table_(info->zone()),
        ic_total_count_(0) {
    DCHECK(!info->IsStub());  // Code stubs are not compiled with full codegen.
    Initialize();
  }
54
  // Second-stage initialization, split out of (and called by) the
  // constructor.
  void Initialize();

  // Main entry point: generate code for the function described by |info|.
  // Returns whether code generation succeeded.
  static bool MakeCode(CompilationInfo* info);

  // Encode bailout state and pc-offset as a BitField<type, start, size>.
  // Only use 30 bits because we encode the result as a smi.
  class BailoutStateField : public BitField<Deoptimizer::BailoutState, 0, 1> {};
  class PcField : public BitField<unsigned, 1, 30 - 1> {};

  // Maximum weight attributed to a single back edge (see the back-edge
  // bookkeeping and profiling-counter helpers below).
  static const int kMaxBackEdgeWeight = 127;

  // Platform-specific code size multiplier.
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
  static const int kCodeSizeMultiplier = 105;
#elif V8_TARGET_ARCH_X64
  static const int kCodeSizeMultiplier = 165;
#elif V8_TARGET_ARCH_ARM
  static const int kCodeSizeMultiplier = 149;
#elif V8_TARGET_ARCH_ARM64
  static const int kCodeSizeMultiplier = 220;
#elif V8_TARGET_ARCH_PPC64
  static const int kCodeSizeMultiplier = 200;
#elif V8_TARGET_ARCH_PPC
  static const int kCodeSizeMultiplier = 200;
#elif V8_TARGET_ARCH_MIPS
  static const int kCodeSizeMultiplier = 149;
#elif V8_TARGET_ARCH_MIPS64
  static const int kCodeSizeMultiplier = 149;
#elif V8_TARGET_ARCH_S390
// TODO(joransiu): Copied PPC value. Check this is sensible for S390.
  static const int kCodeSizeMultiplier = 200;
#elif V8_TARGET_ARCH_S390X
// TODO(joransiu): Copied PPC value. Check this is sensible for S390X.
  static const int kCodeSizeMultiplier = 200;
#else
#error Unsupported target architecture.
#endif

  // The platform-specific register in which expression results are left
  // (the "accumulator").
  static Register result_register();
94
95 private:
  // Shorthand for the deoptimizer's bailout-state enum used throughout.
  typedef Deoptimizer::BailoutState BailoutState;

  // Forward declarations for the nested-statement tracking classes below.
  class Breakable;
  class Iteration;
  class TryFinally;

  // Forward declaration for the expression-context class defined below.
  class TestContext;
103
  // A node in the stack of statements we are currently nested inside
  // while generating code (blocks, loops, try/finally, with/catch).
  // Instances link themselves into the codegen's nesting stack on
  // construction and unlink on destruction, so they must be created and
  // destroyed in strict LIFO order (i.e. stack-allocated).
  class NestedStatement BASE_EMBEDDED {
   public:
    explicit NestedStatement(FullCodeGenerator* codegen)
        : codegen_(codegen),
          stack_depth_at_target_(codegen->operand_stack_depth_) {
      // Link into codegen's nesting stack.
      previous_ = codegen->nesting_stack_;
      codegen->nesting_stack_ = this;
    }
    virtual ~NestedStatement() {
      // Unlink from codegen's nesting stack.
      DCHECK_EQ(this, codegen_->nesting_stack_);
      codegen_->nesting_stack_ = previous_;
    }

    // Safe downcasts: each returns nullptr unless overridden by the
    // corresponding subclass.
    virtual Breakable* AsBreakable() { return nullptr; }
    virtual Iteration* AsIteration() { return nullptr; }
    virtual TryFinally* AsTryFinally() { return nullptr; }

    // Whether this entry is the statement that a continue/break targeting
    // |target| should stop at.  Overridden by Iteration/Breakable.
    virtual bool IsContinueTarget(Statement* target) { return false; }
    virtual bool IsBreakTarget(Statement* target) { return false; }
    virtual bool IsTryFinally() { return false; }

    // Notify the statement that we are exiting it via break, continue, or
    // return and give it a chance to generate cleanup code.  Return the
    // next outer statement in the nesting stack.  We accumulate in
    // {*context_length} the number of context chain links to unwind as we
    // traverse the nesting stack from an exit to its target.
    virtual NestedStatement* Exit(int* context_length) { return previous_; }

    // Determine the expected operand stack depth when this statement is being
    // used as the target of an exit. The caller will drop to this depth.
    int GetStackDepthAtTarget() { return stack_depth_at_target_; }

   protected:
    MacroAssembler* masm() { return codegen_->masm(); }

    FullCodeGenerator* codegen_;   // The owning code generator.
    NestedStatement* previous_;    // Next outer entry in the nesting stack.
    int stack_depth_at_target_;    // Operand stack depth when this was entered.

   private:
    DISALLOW_COPY_AND_ASSIGN(NestedStatement);
  };
148
  // A breakable statement such as a block.  Owns the label that a 'break'
  // targeting this statement jumps to.
  class Breakable : public NestedStatement {
   public:
    Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
        : NestedStatement(codegen), statement_(statement) {
    }

    Breakable* AsBreakable() override { return this; }
    bool IsBreakTarget(Statement* target) override {
      return statement() == target;
    }

    // The AST node this nesting entry represents.
    BreakableStatement* statement() { return statement_; }
    // Label to bind at the position where a 'break' should land.
    Label* break_label() { return &break_label_; }

   private:
    BreakableStatement* statement_;
    Label break_label_;
  };
168
  // An iteration statement such as a while, for, or do loop.  In addition
  // to the break label inherited from Breakable, owns the label that a
  // 'continue' targeting this loop jumps to.
  class Iteration : public Breakable {
   public:
    Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
        : Breakable(codegen, statement) {
    }

    Iteration* AsIteration() override { return this; }
    bool IsContinueTarget(Statement* target) override {
      return statement() == target;
    }

    // Label to bind at the position where a 'continue' should land.
    Label* continue_label() { return &continue_label_; }

   private:
    Label continue_label_;
  };
186
187  // A nested block statement.
188  class NestedBlock : public Breakable {
189   public:
190    NestedBlock(FullCodeGenerator* codegen, Block* block)
191        : Breakable(codegen, block) {
192    }
193
194    NestedStatement* Exit(int* context_length) override {
195      auto block_scope = statement()->AsBlock()->scope();
196      if (block_scope != nullptr) {
197        if (block_scope->ContextLocalCount() > 0) ++(*context_length);
198      }
199      return previous_;
200    }
201  };
202
  // A class literal expression.  Entered while the class body is being
  // generated so that exits unwind the class scope's context when one was
  // allocated.
  class NestedClassLiteral : public NestedStatement {
   public:
    NestedClassLiteral(FullCodeGenerator* codegen, ClassLiteral* lit)
        : NestedStatement(codegen),
          needs_context_(lit->scope() != nullptr &&
                         lit->scope()->NeedsContext()) {}

    NestedStatement* Exit(int* context_length) override {
      // Account for the class scope's context, if any, when unwinding.
      if (needs_context_) ++(*context_length);
      return previous_;
    }

   private:
    // Whether the class literal's scope requires a context of its own.
    const bool needs_context_;
  };
219
  // Bookkeeping for control-flow commands (break/continue/return/throw)
  // that must be deferred because they cross a finally block: each command
  // is recorded together with a token, control jumps to |finally_entry_|,
  // and after the finally code runs EmitCommands() dispatches on the token
  // to resume the original command.
  class DeferredCommands {
   public:
    // The kinds of control transfer that a finally block can defer.
    enum Command { kReturn, kThrow, kBreak, kContinue };
    typedef int TokenId;
    struct DeferredCommand {
      Command command;    // Which control transfer was deferred.
      TokenId token;      // Token identifying this deferral at dispatch time.
      Statement* target;  // Break/continue target (not used for return/throw).
    };

    DeferredCommands(FullCodeGenerator* codegen, Label* finally_entry)
        : codegen_(codegen),
          commands_(codegen->zone()),
          return_token_(TokenDispenserForFinally::kInvalidToken),
          throw_token_(TokenDispenserForFinally::kInvalidToken),
          finally_entry_(finally_entry) {}

    // Emit the dispatch code that resumes each recorded command after the
    // finally block has completed.
    void EmitCommands();

    // Record a deferred command of the respective kind and emit the jump
    // to the finally block.
    void RecordBreak(Statement* target);
    void RecordContinue(Statement* target);
    void RecordReturn();
    void RecordThrow();
    void EmitFallThrough();

   private:
    MacroAssembler* masm() { return codegen_->masm(); }
    void EmitJumpToFinally(TokenId token);

    FullCodeGenerator* codegen_;
    ZoneVector<DeferredCommand> commands_;  // All commands recorded so far.
    TokenDispenserForFinally dispenser_;    // Allocates dispatch tokens.
    TokenId return_token_;  // Token shared by return commands, once recorded.
    TokenId throw_token_;   // Token shared by throw commands, once recorded.
    Label* finally_entry_;  // Entry label of the finally block's code.
  };
256
  // The try block of a try/finally statement.  Exits that cross this entry
  // must run the finally code first; the deferred-commands object records
  // how to resume the exit afterwards.
  class TryFinally : public NestedStatement {
   public:
    TryFinally(FullCodeGenerator* codegen, DeferredCommands* commands)
        : NestedStatement(codegen), deferred_commands_(commands) {}

    // Defined out of line: defers the pending break/continue/return
    // through |deferred_commands_| instead of falling through.
    NestedStatement* Exit(int* context_length) override;

    bool IsTryFinally() override { return true; }
    TryFinally* AsTryFinally() override { return this; }

    DeferredCommands* deferred_commands() { return deferred_commands_; }

   private:
    DeferredCommands* deferred_commands_;
  };
273
274  // The body of a with or catch.
275  class WithOrCatch : public NestedStatement {
276   public:
277    explicit WithOrCatch(FullCodeGenerator* codegen)
278        : NestedStatement(codegen) {
279    }
280
281    NestedStatement* Exit(int* context_length) override {
282      ++(*context_length);
283      return previous_;
284    }
285  };
286
  // A platform-specific utility to overwrite the accumulator register
  // with a GC-safe value.
  void ClearAccumulator();

  // Determine whether or not to inline the smi case for the given
  // operation.
  bool ShouldInlineSmiCase(Token::Value op);

  // Helper function to convert a pure value into a test context.  The value
  // is expected on the stack or the accumulator, depending on the platform.
  // See the platform-specific implementation for details.
  void DoTest(Expression* condition,
              Label* if_true,
              Label* if_false,
              Label* fall_through);
  void DoTest(const TestContext* context);

  // Helper function to split control flow and avoid a branch to the
  // fall-through label if it is set up.  MIPS variants take an explicit
  // lhs/rhs to compare; the PPC variant takes a condition register.
#if V8_TARGET_ARCH_MIPS
  void Split(Condition cc,
             Register lhs,
             const Operand&  rhs,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#elif V8_TARGET_ARCH_MIPS64
  void Split(Condition cc,
             Register lhs,
             const Operand&  rhs,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#elif V8_TARGET_ARCH_PPC
  void Split(Condition cc, Label* if_true, Label* if_false, Label* fall_through,
             CRegister cr = cr7);
#else  // All other arch.
  void Split(Condition cc,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#endif

  // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
  // a register.  Emits a context chain walk if necessary (so does
  // SetVar) so avoid calling both on the same variable.
  void GetVar(Register destination, Variable* var);

  // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable.  If it's in
  // the context, the write barrier will be emitted and source, scratch0,
  // scratch1 will be clobbered.  Emits a context chain walk if necessary
  // (so does GetVar) so avoid calling both on the same variable.
  void SetVar(Variable* var,
              Register source,
              Register scratch0,
              Register scratch1);

  // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
  // variable.  Writing does not need the write barrier.
  MemOperand StackOperand(Variable* var);

  // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
  // variable.  May emit code to traverse the context chain, loading the
  // found context into the scratch register.  Writing to this operand will
  // need the write barrier if location is CONTEXT.
  MemOperand VarOperand(Variable* var, Register scratch);
353
  // Visit |expr| for its side effects only; the result value is discarded.
  void VisitForEffect(Expression* expr) {
    if (FLAG_verify_operand_stack_depth) EmitOperandStackDepthCheck();
    EffectContext context(this);
    Visit(expr);
    PrepareForBailout(expr, BailoutState::NO_REGISTERS);
  }
360
  // Visit |expr| and leave its value in the accumulator (result) register.
  void VisitForAccumulatorValue(Expression* expr) {
    if (FLAG_verify_operand_stack_depth) EmitOperandStackDepthCheck();
    AccumulatorValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  }
367
  // Visit |expr| and leave its value pushed on the operand stack.
  void VisitForStackValue(Expression* expr) {
    if (FLAG_verify_operand_stack_depth) EmitOperandStackDepthCheck();
    StackValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, BailoutState::NO_REGISTERS);
  }
374
  // Visit |expr| as a test, branching to |if_true| or |if_false|; when
  // possible the code falls through to |fall_through| instead of branching.
  void VisitForControl(Expression* expr,
                       Label* if_true,
                       Label* if_false,
                       Label* fall_through) {
    if (FLAG_verify_operand_stack_depth) EmitOperandStackDepthCheck();
    TestContext context(this, expr, if_true, if_false, fall_through);
    Visit(expr);
    // For test contexts, we prepare for bailout before branching, not at
    // the end of the entire expression.  This happens as part of visiting
    // the expression.
  }
386
  // Visit an expression in a duplicate of the current expression context.
  void VisitInDuplicateContext(Expression* expr);

  // Declaration handling: visit the declarations and emit the global
  // declarations collected while doing so.
  void VisitDeclarations(ZoneList<Declaration*>* declarations) override;
  void DeclareModules(Handle<FixedArray> descriptions);
  void DeclareGlobals(Handle<FixedArray> pairs);
  int DeclareGlobalsFlags();

  // Push, pop or drop values onto/from the operand stack.
  void PushOperand(Register reg);
  void PopOperand(Register reg);
  void DropOperands(int count);

  // Convenience helpers for pushing onto the operand stack.
  void PushOperand(MemOperand operand);
  void PushOperand(Handle<Object> handle);
  void PushOperand(Smi* smi);

  // Convenience helpers for pushing/popping multiple operands.
  void PushOperands(Register reg1, Register reg2);
  void PushOperands(Register reg1, Register reg2, Register reg3);
  void PushOperands(Register reg1, Register reg2, Register reg3, Register reg4);
  void PopOperands(Register reg1, Register reg2);

  // Convenience helper for calling a runtime function that consumes arguments
  // from the operand stack (only usable for functions with known arity).
  void CallRuntimeWithOperands(Runtime::FunctionId function_id);

  // Static tracking of the operand stack depth.
  void OperandStackDepthDecrement(int count);
  void OperandStackDepthIncrement(int count);

  // Generate debug code that verifies that our static tracking of the operand
  // stack depth is in sync with the actual operand stack during runtime.
  void EmitOperandStackDepthCheck();

  // Generate code to create an iterator result object.  The "value" property is
  // set to a value popped from the stack, and "done" is set according to the
  // argument.  The result object is left in the result register.
  void EmitCreateIteratorResult(bool done);

  // Try to perform a comparison as a fast inlined literal compare if
  // the operands allow it.  Returns true if the compare operation
  // has been matched and all code generated; false otherwise.
  bool TryLiteralCompare(CompareOperation* compare);

  // Platform-specific code for comparing the type of a value with
  // a given literal string.
  void EmitLiteralCompareTypeof(Expression* expr,
                                Expression* sub_expr,
                                Handle<String> check);

  // Platform-specific code for equality comparison with a nil-like value.
  void EmitLiteralCompareNil(CompareOperation* expr,
                             Expression* sub_expr,
                             NilValue nil);

  // Bailout support: record the current pc and bailout state for the given
  // AST node / bailout id.
  void PrepareForBailout(Expression* node, Deoptimizer::BailoutState state);
  void PrepareForBailoutForId(BailoutId id, Deoptimizer::BailoutState state);
446
  // Returns a smi for the index into the FixedArray that backs the feedback
  // vector.
  Smi* SmiFromSlot(FeedbackVectorSlot slot) const {
    return Smi::FromInt(TypeFeedbackVector::GetIndexFromSpec(
        literal()->feedback_vector_spec(), slot));
  }
453
  // Record a call's return site offset, used to rebuild the frame if the
  // called function was inlined at the site.
  void RecordJSReturnSite(Call* call);

  // Prepare for bailout before a test (or compare) and branch.  If
  // should_normalize, then the following comparison will not handle the
  // canonical JS true value so we will insert a (dead) test against true at
  // the actual bailout target from the optimized code. If not
  // should_normalize, the true and false labels are ignored.
  void PrepareForBailoutBeforeSplit(Expression* expr,
                                    bool should_normalize,
                                    Label* if_true,
                                    Label* if_false);

  // If enabled, emit debug code for checking that the current context is
  // neither a with nor a catch context.
  void EmitDebugCheckDeclarationContext(Variable* variable);

  // This is meant to be called at loop back edges, |back_edge_target| is
  // the jump target of the back edge and is used to approximate the amount
  // of code inside the loop.
  void EmitBackEdgeBookkeeping(IterationStatement* stmt,
                               Label* back_edge_target);
  // Record the OSR AST id corresponding to a back edge in the code.
  void RecordBackEdge(BailoutId osr_ast_id);
  // Emit a table of back edge ids, pcs and loop depths into the code stream.
  // Return the offset of the start of the table.
  unsigned EmitBackEdgeTable();

  // Profiling counter maintenance (used for back edges and returns).
  void EmitProfilingCounterDecrement(int delta);
  void EmitProfilingCounterReset();

  // Emit code to pop values from the stack associated with nested statements
  // like try/catch, try/finally, etc, running the finallies and unwinding the
  // handlers as needed. Also emits the return sequence if necessary (i.e.,
  // if the return is not delayed by a finally block).
  void EmitUnwindAndReturn();

  // Platform-specific return sequence.
  void EmitReturnSequence();
  void EmitProfilingCounterHandlingForReturnSequence(bool is_tail_call);

  // Platform-specific code sequences for calls.
  void EmitCall(Call* expr, ConvertReceiverMode = ConvertReceiverMode::kAny);
  void EmitSuperConstructorCall(Call* expr);
  void EmitCallWithLoadIC(Call* expr);
  void EmitSuperCallWithLoadIC(Call* expr);
  void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);
  void EmitKeyedSuperCallWithLoadIC(Call* expr);
  void EmitPossiblyEvalCall(Call* expr);
504
  // Intrinsics (%-functions) for which full codegen emits a hand-written
  // code sequence; GENERATOR_DECLARATION below declares one Emit<Name>
  // method per entry.
#define FOR_EACH_FULL_CODE_INTRINSIC(F) \
  F(IsSmi)                              \
  F(IsArray)                            \
  F(IsTypedArray)                       \
  F(IsRegExp)                           \
  F(IsJSProxy)                          \
  F(Call)                               \
  F(NewObject)                          \
  F(ValueOf)                            \
  F(StringCharFromCode)                 \
  F(IsJSReceiver)                       \
  F(MathPow)                            \
  F(HasCachedArrayIndex)                \
  F(GetCachedArrayIndex)                \
  F(GetSuperConstructor)                \
  F(DebugBreakInOptimizedCode)          \
  F(ClassOf)                            \
  F(StringCharCodeAt)                   \
  F(SubString)                          \
  F(RegExpExec)                         \
  F(RegExpConstructResult)              \
  F(ToInteger)                          \
  F(NumberToString)                     \
  F(ToString)                           \
  F(ToLength)                           \
  F(ToNumber)                           \
  F(ToName)                             \
  F(ToObject)                           \
  F(DebugIsActive)                      \
  F(CreateIterResultObject)

  // Declare one Emit<Name>(CallRuntime*) method per intrinsic listed above.
#define GENERATOR_DECLARATION(Name) void Emit##Name(CallRuntime* call);
  FOR_EACH_FULL_CODE_INTRINSIC(GENERATOR_DECLARATION)
#undef GENERATOR_DECLARATION
539
  // Lower an intrinsic call to a call of the given code stub.
  void EmitIntrinsicAsStubCall(CallRuntime* expr, const Callable& callable);

  // Emits call to respective code stub.
  void EmitHasProperty();

  // Platform-specific code for restoring context from current JS frame.
  void RestoreContext();

  // Platform-specific code for loading variables.
  void EmitLoadGlobalCheckExtensions(VariableProxy* proxy,
                                     TypeofMode typeof_mode, Label* slow);
  MemOperand ContextSlotOperandCheckExtensions(Variable* var, Label* slow);
  void EmitDynamicLookupFastCase(VariableProxy* proxy, TypeofMode typeof_mode,
                                 Label* slow, Label* done);
  void EmitGlobalVariableLoad(VariableProxy* proxy, TypeofMode typeof_mode);
  void EmitVariableLoad(VariableProxy* proxy,
                        TypeofMode typeof_mode = NOT_INSIDE_TYPEOF);

  void EmitAccessor(ObjectLiteralProperty* property);

  // Whether loading |proxy| requires a check for the-hole (TDZ etc. —
  // see implementation).
  bool NeedsHoleCheckForLoad(VariableProxy* proxy);

  // Expects the arguments and the function already pushed.
  void EmitResolvePossiblyDirectEval(Call* expr);

  // Platform-specific support for allocating a new closure based on
  // the given function info.
  void EmitNewClosure(Handle<SharedFunctionInfo> info, bool pretenure);

  // Re-usable portions of CallRuntime.
  void EmitLoadJSRuntimeFunction(CallRuntime* expr);
  void EmitCallJSRuntimeFunction(CallRuntime* expr);

  // Load a value from a named property.
  // The receiver is left on the stack by the IC.
  void EmitNamedPropertyLoad(Property* expr);

  // Load a value from super.named property.
  // Expect receiver ('this' value) and home_object on the stack.
  void EmitNamedSuperPropertyLoad(Property* expr);

  // Load a value from super[keyed] property.
  // Expect receiver ('this' value), home_object and key on the stack.
  void EmitKeyedSuperPropertyLoad(Property* expr);

  // Load a value from a keyed property.
  // The receiver and the key are left on the stack by the IC.
  void EmitKeyedPropertyLoad(Property* expr);

  // Adds the properties to the class (function) object and to its prototype.
  // Expects the class (function) in the accumulator. The class (function) is
  // in the accumulator after installing all the properties.
  void EmitClassDefineProperties(ClassLiteral* lit);

  // Pushes the property key as a Name on the stack.
  void EmitPropertyKey(ObjectLiteralProperty* property, BailoutId bailout_id);

  // Apply the compound assignment operator. Expects the left operand on top
  // of the stack and the right one in the accumulator.
  void EmitBinaryOp(BinaryOperation* expr, Token::Value op);

  // Helper functions for generating inlined smi code for certain
  // binary operations.
  void EmitInlineSmiBinaryOp(BinaryOperation* expr,
                             Token::Value op,
                             Expression* left,
                             Expression* right);

  // Assign to the given expression as if via '='. The right-hand-side value
  // is expected in the accumulator. slot is only used if FLAG_vector_stores
  // is true.
  void EmitAssignment(Expression* expr, FeedbackVectorSlot slot);

  // Complete a variable assignment.  The right-hand-side value is expected
  // in the accumulator.
  void EmitVariableAssignment(Variable* var, Token::Value op,
                              FeedbackVectorSlot slot);

  // Helper functions to EmitVariableAssignment.
  void EmitStoreToStackLocalOrContextSlot(Variable* var,
                                          MemOperand location);

  // Complete a named property assignment.  The receiver is expected on top
  // of the stack and the right-hand-side value in the accumulator.
  void EmitNamedPropertyAssignment(Assignment* expr);

  // Complete a super named property assignment. The right-hand-side value
  // is expected in accumulator.
  void EmitNamedSuperPropertyStore(Property* prop);

  // Complete a super keyed property assignment. The right-hand-side value
  // is expected in accumulator.
  void EmitKeyedSuperPropertyStore(Property* prop);

  // Complete a keyed property assignment.  The receiver and key are
  // expected on top of the stack and the right-hand-side value in the
  // accumulator.
  void EmitKeyedPropertyAssignment(Assignment* expr);

  // Whether |expr| needs a [[HomeObject]]; delegates to FunctionLiteral.
  static bool NeedsHomeObject(Expression* expr) {
    return FunctionLiteral::NeedsHomeObject(expr);
  }
642
  // Adds the [[HomeObject]] to |initializer| if it is a FunctionLiteral.
  // The value of the initializer is expected to be at the top of the stack.
  // |offset| is the offset in the stack where the home object can be found.
  void EmitSetHomeObject(Expression* initializer, int offset,
                         FeedbackVectorSlot slot);

  // Accumulator variant of EmitSetHomeObject.
  void EmitSetHomeObjectAccumulator(Expression* initializer, int offset,
                                    FeedbackVectorSlot slot);

  void EmitLoadStoreICSlot(FeedbackVectorSlot slot);

  // Emit a call to the given IC code object, recording type feedback.
  void CallIC(Handle<Code> code,
              TypeFeedbackId id = TypeFeedbackId::None());

  void CallLoadIC(TypeFeedbackId id = TypeFeedbackId::None());
  // Inside typeof reference errors are never thrown.
  void CallLoadGlobalIC(TypeofMode typeof_mode,
                        TypeFeedbackId id = TypeFeedbackId::None());
  void CallStoreIC(TypeFeedbackId id = TypeFeedbackId::None());

  // Source-position bookkeeping for the debugger and stack traces.
  void SetFunctionPosition(FunctionLiteral* fun);
  void SetReturnPosition(FunctionLiteral* fun);

  enum InsertBreak { INSERT_BREAK, SKIP_BREAK };

  // During stepping we want to be able to break at each statement, but not at
  // every (sub-)expression. That is why by default we insert breaks at every
  // statement position, but not at every expression position, unless stated
  // otherwise.
  void SetStatementPosition(Statement* stmt,
                            InsertBreak insert_break = INSERT_BREAK);
  void SetExpressionPosition(Expression* expr);

  // Consider an expression a statement. As such, we also insert a break.
  // This is used in loop headers where we want to break for each iteration.
  void SetExpressionAsStatementPosition(Expression* expr);

  void SetCallPosition(Expression* expr,
                       TailCallMode tail_call_mode = TailCallMode::kDisallow);

  void SetConstructCallPosition(Expression* expr) {
    // Currently call and construct calls are treated the same wrt debugging.
    SetCallPosition(expr);
  }

  // Non-local control flow support.
  void EnterTryBlock(int handler_index, Label* handler);
  void ExitTryBlock(int handler_index);
  void EnterFinallyBlock();
  void ExitFinallyBlock();
  void ClearPendingMessage();

  // Emit the code for a 'continue'/'break' targeting |target|, unwinding
  // the nesting stack as needed.
  void EmitContinue(Statement* target);
  void EmitBreak(Statement* target);
698  // Loop nesting counter.
699  int loop_depth() { return loop_depth_; }
700  void increment_loop_depth() { loop_depth_++; }
701  void decrement_loop_depth() {
702    DCHECK(loop_depth_ > 0);
703    loop_depth_--;
704  }
705
  // Simple accessors.
  MacroAssembler* masm() const { return masm_; }

  // The currently active expression context (see ExpressionContext below).
  class ExpressionContext;
  const ExpressionContext* context() { return context_; }
  void set_new_context(const ExpressionContext* context) { context_ = context; }

  Isolate* isolate() const { return isolate_; }
  Zone* zone() const { return zone_; }
  Handle<Script> script() { return info_->script(); }
  LanguageMode language_mode() { return scope()->language_mode(); }
  bool has_simple_parameters() { return info_->has_simple_parameters(); }
  FunctionLiteral* literal() const { return info_->literal(); }
  Scope* scope() { return scope_; }

  // The platform-specific register holding the current context.
  static Register context_register();

  // Get fields from the stack frame. Offsets are the frame pointer relative
  // offsets defined in, e.g., StandardFrameConstants.
  void LoadFromFrameField(int frame_offset, Register value);
  // Set fields in the stack frame. Offsets are the frame pointer relative
  // offsets defined in, e.g., StandardFrameConstants.
  void StoreToFrameField(int frame_offset, Register value);

  // Load a value from the current context. Indices are defined as an enum
  // in v8::internal::Context.
  void LoadContextField(Register dst, int context_index);

  // Push the function argument for the runtime functions PushWithContext
  // and PushCatchContext.
  void PushFunctionArgumentForContextAllocation();

  void PushCalleeAndWithBaseObject(Call* expr);
738
  // AST node visit functions: one Visit<NodeType> override is declared for
  // every AST node type via AST_NODE_LIST.
#define DECLARE_VISIT(type) void Visit##type(type* node) override;
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  // Binary operations are split by operator kind.
  void VisitComma(BinaryOperation* expr);
  void VisitLogicalExpression(BinaryOperation* expr);
  void VisitArithmeticExpression(BinaryOperation* expr);

  void VisitForTypeofValue(Expression* expr);

  // Emit the whole function, then attach the recorded metadata to the
  // generated Code object.
  void Generate();
  void PopulateDeoptimizationData(Handle<Code> code);
  void PopulateTypeFeedbackInfo(Handle<Code> code);
  void PopulateHandlerTable(Handle<Code> code);

  bool MustCreateObjectLiteralWithRuntime(ObjectLiteral* expr) const;
  bool MustCreateArrayLiteralWithRuntime(ArrayLiteral* expr) const;

  // Allocate a fresh slot in the handler table and return its index.
  int NewHandlerTableEntry();
759
  // A recorded bailout point: the AST id together with the pc and bailout
  // state packed via PcField/BailoutStateField (see above).
  struct BailoutEntry {
    BailoutId id;
    unsigned pc_and_state;
  };

  // Per back-edge data emitted into the back edge table (see
  // EmitBackEdgeTable).
  struct BackEdgeEntry {
    BailoutId id;
    unsigned pc;
    uint32_t loop_depth;
  };

  // One row of the exception handler table: the covered code range, the
  // handler's code offset, and the stack/try-catch depths at the handler.
  struct HandlerTableEntry {
    unsigned range_start;
    unsigned range_end;
    unsigned handler_offset;
    int stack_depth;
    int try_catch_depth;
  };
778
  // Describes what the code generated for an expression should do with the
  // expression's value: discard it, keep it in the accumulator, push it on
  // the operand stack, or branch on it.  Contexts form a stack mirroring
  // expression nesting: constructing a context makes it current, and
  // destroying it restores the previous one.
  class ExpressionContext BASE_EMBEDDED {
   public:
    explicit ExpressionContext(FullCodeGenerator* codegen)
        : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
      codegen->set_new_context(this);
    }

    virtual ~ExpressionContext() {
      // Restore the context that was current before this one.
      codegen_->set_new_context(old_);
    }

    Isolate* isolate() const { return codegen_->isolate(); }

    // Convert constant control flow (true or false) to the result expected for
    // this expression context.
    virtual void Plug(bool flag) const = 0;

    // Emit code to convert a pure value (in a register, known variable
    // location, as a literal, or on top of the stack) into the result
    // expected according to this expression context.
    virtual void Plug(Register reg) const = 0;
    virtual void Plug(Variable* var) const = 0;
    virtual void Plug(Handle<Object> lit) const = 0;
    virtual void Plug(Heap::RootListIndex index) const = 0;
    virtual void PlugTOS() const = 0;

    // Emit code to convert pure control flow to a pair of unbound labels into
    // the result expected according to this expression context.  The
    // implementation will bind both labels unless it's a TestContext, which
    // won't bind them at this point.
    virtual void Plug(Label* materialize_true,
                      Label* materialize_false) const = 0;

    // Emit code to discard count elements from the top of stack, then convert
    // a pure value into the result expected according to this expression
    // context.
    virtual void DropAndPlug(int count, Register reg) const = 0;

    // Set up branch labels for a test expression.  The three Label** parameters
    // are output parameters.
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const = 0;

    // Returns true if we are evaluating only for side effects (i.e. if the
    // result will be discarded).
    virtual bool IsEffect() const { return false; }

    // Returns true if we are evaluating for the value (in accu/on stack).
    virtual bool IsAccumulatorValue() const { return false; }
    virtual bool IsStackValue() const { return false; }

    // Returns true if we are branching on the value rather than materializing
    // it.  Only used for asserts.
    virtual bool IsTest() const { return false; }

   protected:
    FullCodeGenerator* codegen() const { return codegen_; }
    MacroAssembler* masm() const { return masm_; }
    MacroAssembler* masm_;

   private:
    const ExpressionContext* old_;  // Context to restore on destruction.
    FullCodeGenerator* codegen_;
  };
846
  // Context for an expression whose value is wanted in the accumulator
  // register (IsAccumulatorValue() is true).
  class AccumulatorValueContext : public ExpressionContext {
   public:
    explicit AccumulatorValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsAccumulatorValue() const override { return true; }
  };
865
  // Context for an expression whose value is wanted on top of the operand
  // stack (IsStackValue() is true).
  class StackValueContext : public ExpressionContext {
   public:
    explicit StackValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsStackValue() const override { return true; }
  };
884
885  class TestContext : public ExpressionContext {
886   public:
887    TestContext(FullCodeGenerator* codegen,
888                Expression* condition,
889                Label* true_label,
890                Label* false_label,
891                Label* fall_through)
892        : ExpressionContext(codegen),
893          condition_(condition),
894          true_label_(true_label),
895          false_label_(false_label),
896          fall_through_(fall_through) { }
897
898    static const TestContext* cast(const ExpressionContext* context) {
899      DCHECK(context->IsTest());
900      return reinterpret_cast<const TestContext*>(context);
901    }
902
903    Expression* condition() const { return condition_; }
904    Label* true_label() const { return true_label_; }
905    Label* false_label() const { return false_label_; }
906    Label* fall_through() const { return fall_through_; }
907
908    void Plug(bool flag) const override;
909    void Plug(Register reg) const override;
910    void Plug(Label* materialize_true, Label* materialize_false) const override;
911    void Plug(Variable* var) const override;
912    void Plug(Handle<Object> lit) const override;
913    void Plug(Heap::RootListIndex) const override;
914    void PlugTOS() const override;
915    void DropAndPlug(int count, Register reg) const override;
916    void PrepareTest(Label* materialize_true, Label* materialize_false,
917                     Label** if_true, Label** if_false,
918                     Label** fall_through) const override;
919    bool IsTest() const override { return true; }
920
921   private:
922    Expression* condition_;
923    Label* true_label_;
924    Label* false_label_;
925    Label* fall_through_;
926  };
927
  // Context for an expression evaluated only for its side effects; the
  // value itself is discarded (IsEffect() is true).
  class EffectContext : public ExpressionContext {
   public:
    explicit EffectContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsEffect() const override { return true; }
  };
946
  // RAII helper: enters |scope| on construction and leaves it again in the
  // destructor.  needs_block_context_ records whether a block context was
  // actually allocated; the bailout ids parameterize the entry, declarations
  // and exit bailout points (implementation in the .cc file).
  class EnterBlockScopeIfNeeded {
   public:
    EnterBlockScopeIfNeeded(FullCodeGenerator* codegen, Scope* scope,
                            BailoutId entry_id, BailoutId declarations_id,
                            BailoutId exit_id);
    ~EnterBlockScopeIfNeeded();

   private:
    MacroAssembler* masm() const { return codegen_->masm(); }

    FullCodeGenerator* codegen_;
    Scope* saved_scope_;          // Scope to restore in the destructor.
    BailoutId exit_id_;           // Bailout id used when leaving the block.
    bool needs_block_context_;    // True if a block context was entered.
  };
962
  MacroAssembler* masm_;            // Assembler all code is emitted into.
  CompilationInfo* info_;           // Compilation job being processed.
  Isolate* isolate_;                // Cached from info_ (see constructor).
  Zone* zone_;                      // Cached from info_ (see constructor).
  Scope* scope_;                    // Current scope; starts at info_->scope().
  Label return_label_;
  NestedStatement* nesting_stack_;  // Innermost enclosing nested statement.
  int loop_depth_;
  int try_catch_depth_;
  int operand_stack_depth_;
  ZoneList<Handle<Object> >* globals_;
  Handle<FixedArray> modules_;
  int module_index_;
  // Currently active expression context; maintained as an RAII stack by
  // ExpressionContext's constructor/destructor.
  const ExpressionContext* context_;
  // Sized to the AST node count when deoptimization support is on, else
  // empty (see constructor).
  ZoneList<BailoutEntry> bailout_entries_;
  ZoneList<BackEdgeEntry> back_edges_;
  ZoneVector<HandlerTableEntry> handler_table_;
  int ic_total_count_;
  Handle<Cell> profiling_counter_;

  friend class NestedStatement;

  DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
  DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
};
988
989
990class BackEdgeTable {
991 public:
992  BackEdgeTable(Code* code, DisallowHeapAllocation* required) {
993    DCHECK(code->kind() == Code::FUNCTION);
994    instruction_start_ = code->instruction_start();
995    Address table_address = instruction_start_ + code->back_edge_table_offset();
996    length_ = Memory::uint32_at(table_address);
997    start_ = table_address + kTableLengthSize;
998  }
999
1000  uint32_t length() { return length_; }
1001
1002  BailoutId ast_id(uint32_t index) {
1003    return BailoutId(static_cast<int>(
1004        Memory::uint32_at(entry_at(index) + kAstIdOffset)));
1005  }
1006
1007  uint32_t loop_depth(uint32_t index) {
1008    return Memory::uint32_at(entry_at(index) + kLoopDepthOffset);
1009  }
1010
1011  uint32_t pc_offset(uint32_t index) {
1012    return Memory::uint32_at(entry_at(index) + kPcOffsetOffset);
1013  }
1014
1015  Address pc(uint32_t index) {
1016    return instruction_start_ + pc_offset(index);
1017  }
1018
1019  enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT };
1020
1021  // Increase allowed loop nesting level by one and patch those matching loops.
1022  static void Patch(Isolate* isolate, Code* unoptimized_code);
1023
1024  // Patch the back edge to the target state, provided the correct callee.
1025  static void PatchAt(Code* unoptimized_code,
1026                      Address pc,
1027                      BackEdgeState target_state,
1028                      Code* replacement_code);
1029
1030  // Change all patched back edges back to normal interrupts.
1031  static void Revert(Isolate* isolate,
1032                     Code* unoptimized_code);
1033
1034  // Return the current patch state of the back edge.
1035  static BackEdgeState GetBackEdgeState(Isolate* isolate,
1036                                        Code* unoptimized_code,
1037                                        Address pc_after);
1038
1039#ifdef DEBUG
1040  // Verify that all back edges of a certain loop depth are patched.
1041  static bool Verify(Isolate* isolate, Code* unoptimized_code);
1042#endif  // DEBUG
1043
1044 private:
1045  Address entry_at(uint32_t index) {
1046    DCHECK(index < length_);
1047    return start_ + index * kEntrySize;
1048  }
1049
1050  static const int kTableLengthSize = kIntSize;
1051  static const int kAstIdOffset = 0 * kIntSize;
1052  static const int kPcOffsetOffset = 1 * kIntSize;
1053  static const int kLoopDepthOffset = 2 * kIntSize;
1054  static const int kEntrySize = 3 * kIntSize;
1055
1056  Address start_;
1057  Address instruction_start_;
1058  uint32_t length_;
1059};
1060
1061
1062}  // namespace internal
1063}  // namespace v8
1064
1065#endif  // V8_FULL_CODEGEN_FULL_CODEGEN_H_
1066