// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_FULL_CODEGEN_FULL_CODEGEN_H_
#define V8_FULL_CODEGEN_FULL_CODEGEN_H_

#include "src/allocation.h"
#include "src/assert-scope.h"
#include "src/ast/ast.h"
#include "src/ast/scopes.h"
#include "src/bit-vector.h"
#include "src/code-factory.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/deoptimizer.h"
#include "src/globals.h"
#include "src/objects.h"
#include "src/source-position-table.h"

namespace v8 {
namespace internal {

// Forward declarations.
class CompilationInfo;
class CompilationJob;
class JumpPatchSite;
class Scope;

// -----------------------------------------------------------------------------
// Full code generator.

class FullCodeGenerator final : public AstVisitor<FullCodeGenerator> {
 public:
  FullCodeGenerator(MacroAssembler* masm, CompilationInfo* info,
                    uintptr_t stack_limit);

  void Initialize(uintptr_t stack_limit);

  static CompilationJob* NewCompilationJob(CompilationInfo* info);

  static bool MakeCode(CompilationInfo* info, uintptr_t stack_limit);
  static bool MakeCode(CompilationInfo* info);

  // Encode bailout state and pc-offset as a BitField<type, start, size>.
  // Only use 30 bits because we encode the result as a smi.
  class BailoutStateField : public BitField<Deoptimizer::BailoutState, 0, 1> {};
  class PcField : public BitField<unsigned, 1, 30 - 1> {};
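
  // A minimal sketch (not part of the original interface) of how a bailout
  // entry's pc_and_state word is assembled and taken apart with the two
  // BitFields above:
  //
  //   unsigned pc_and_state =
  //       BailoutStateField::encode(state) | PcField::encode(pc_offset);
  //   Deoptimizer::BailoutState state = BailoutStateField::decode(word);
  //   unsigned pc_offset = PcField::decode(word);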

  static const int kMaxBackEdgeWeight = 127;

  // Platform-specific code size multiplier.
#if V8_TARGET_ARCH_IA32 || V8_TARGET_ARCH_X87
  static const int kCodeSizeMultiplier = 105;
#elif V8_TARGET_ARCH_X64
  static const int kCodeSizeMultiplier = 165;
#elif V8_TARGET_ARCH_ARM
  static const int kCodeSizeMultiplier = 149;
#elif V8_TARGET_ARCH_ARM64
  static const int kCodeSizeMultiplier = 220;
#elif V8_TARGET_ARCH_PPC64
  static const int kCodeSizeMultiplier = 200;
#elif V8_TARGET_ARCH_PPC
  static const int kCodeSizeMultiplier = 200;
#elif V8_TARGET_ARCH_MIPS
  static const int kCodeSizeMultiplier = 149;
#elif V8_TARGET_ARCH_MIPS64
  static const int kCodeSizeMultiplier = 149;
#elif V8_TARGET_ARCH_S390
// TODO(joransiu): Copied PPC value. Check this is sensible for S390.
  static const int kCodeSizeMultiplier = 200;
#elif V8_TARGET_ARCH_S390X
// TODO(joransiu): Copied PPC value. Check this is sensible for S390X.
  static const int kCodeSizeMultiplier = 200;
#else
#error Unsupported target architecture.
#endif

  static Register result_register();

 private:
  typedef Deoptimizer::BailoutState BailoutState;

  class Breakable;
  class Iteration;

  class TestContext;

  class NestedStatement BASE_EMBEDDED {
   public:
    explicit NestedStatement(FullCodeGenerator* codegen)
        : codegen_(codegen),
          stack_depth_at_target_(codegen->operand_stack_depth_) {
      // Link into codegen's nesting stack.
      previous_ = codegen->nesting_stack_;
      codegen->nesting_stack_ = this;
    }
    virtual ~NestedStatement() {
      // Unlink from codegen's nesting stack.
      DCHECK_EQ(this, codegen_->nesting_stack_);
      codegen_->nesting_stack_ = previous_;
    }

    virtual Breakable* AsBreakable() { return nullptr; }
    virtual Iteration* AsIteration() { return nullptr; }

    virtual bool IsContinueTarget(Statement* target) { return false; }
    virtual bool IsBreakTarget(Statement* target) { return false; }

    // Notify the statement that we are exiting it via break, continue, or
    // return and give it a chance to generate cleanup code.  Return the
    // next outer statement in the nesting stack.  We accumulate in
    // {*context_length} the number of context chain links to unwind as we
    // traverse the nesting stack from an exit to its target.
    virtual NestedStatement* Exit(int* context_length) { return previous_; }

    // Determine the expected operand stack depth when this statement is being
    // used as the target of an exit. The caller will drop to this depth.
    int GetStackDepthAtTarget() { return stack_depth_at_target_; }

   protected:
    MacroAssembler* masm() { return codegen_->masm(); }

    FullCodeGenerator* codegen_;
    NestedStatement* previous_;
    int stack_depth_at_target_;

   private:
    DISALLOW_COPY_AND_ASSIGN(NestedStatement);
  };
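
  // A minimal sketch of how an exit traverses this stack (an assumption
  // drawn from the Exit()/IsBreakTarget() contract above; the real walk
  // lives in EmitBreak/EmitContinue):
  //
  //   int context_length = 0;
  //   NestedStatement* current = nesting_stack_;
  //   while (!current->IsBreakTarget(target)) {
  //     current = current->Exit(&context_length);
  //   }
  //   // ...then drop operands to current->GetStackDepthAtTarget() and
  //   // unwind context_length context chain links before jumping.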

  // A breakable statement such as a block.
  class Breakable : public NestedStatement {
   public:
    Breakable(FullCodeGenerator* codegen, BreakableStatement* statement)
        : NestedStatement(codegen), statement_(statement) {
    }

    Breakable* AsBreakable() override { return this; }
    bool IsBreakTarget(Statement* target) override {
      return statement() == target;
    }

    BreakableStatement* statement() { return statement_; }
    Label* break_label() { return &break_label_; }

   private:
    BreakableStatement* statement_;
    Label break_label_;
  };

  // An iteration statement such as a while, for, or do loop.
  class Iteration : public Breakable {
   public:
    Iteration(FullCodeGenerator* codegen, IterationStatement* statement)
        : Breakable(codegen, statement) {
    }

    Iteration* AsIteration() override { return this; }
    bool IsContinueTarget(Statement* target) override {
      return statement() == target;
    }

    Label* continue_label() { return &continue_label_; }

   private:
    Label continue_label_;
  };

  // A nested block statement.
  class NestedBlock : public Breakable {
   public:
    NestedBlock(FullCodeGenerator* codegen, Block* block)
        : Breakable(codegen, block) {
    }

    NestedStatement* Exit(int* context_length) override {
      auto block_scope = statement()->AsBlock()->scope();
      if (block_scope != nullptr) {
        if (block_scope->ContextLocalCount() > 0) ++(*context_length);
      }
      return previous_;
    }
  };

  // A platform-specific utility to overwrite the accumulator register
  // with a GC-safe value.
  void ClearAccumulator();

  // Determine whether or not to inline the smi case for the given
  // operation.
  bool ShouldInlineSmiCase(Token::Value op);

  // Helper function to convert a pure value into a test context.  The value
  // is expected on the stack or the accumulator, depending on the platform.
  // See the platform-specific implementation for details.
  void DoTest(Expression* condition,
              Label* if_true,
              Label* if_false,
              Label* fall_through);
  void DoTest(const TestContext* context);

  // Helper function to split control flow and avoid a branch to the
  // fall-through label if it is set up.
#if V8_TARGET_ARCH_MIPS
  void Split(Condition cc,
             Register lhs,
             const Operand& rhs,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#elif V8_TARGET_ARCH_MIPS64
  void Split(Condition cc,
             Register lhs,
             const Operand& rhs,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#elif V8_TARGET_ARCH_PPC
  void Split(Condition cc, Label* if_true, Label* if_false, Label* fall_through,
             CRegister cr = cr7);
#else  // All other arch.
  void Split(Condition cc,
             Label* if_true,
             Label* if_false,
             Label* fall_through);
#endif
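
  // A minimal usage sketch for the common variant (an assumption based on
  // the comment above; condition names vary per architecture, e.g. eq on
  // ARM, equal on ia32/x64):
  //
  //   Label if_true, if_false, fall_through;
  //   // ...emit a comparison that sets the condition flags...
  //   Split(eq, &if_true, &if_false, &fall_through);
  //   __ bind(&fall_through);  // no branch was emitted for this label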

  // Load the value of a known (PARAMETER, LOCAL, or CONTEXT) variable into
  // a register.  Emits a context chain walk if necessary (so does SetVar),
  // so avoid calling both on the same variable.
  void GetVar(Register destination, Variable* var);

  // Assign to a known (PARAMETER, LOCAL, or CONTEXT) variable.  If it's in
  // the context, the write barrier will be emitted and source, scratch0,
  // scratch1 will be clobbered.  Emits a context chain walk if necessary
  // (so does GetVar), so avoid calling both on the same variable.
  void SetVar(Variable* var,
              Register source,
              Register scratch0,
              Register scratch1);

  // An operand used to read/write a stack-allocated (PARAMETER or LOCAL)
  // variable.  Writing does not need the write barrier.
  MemOperand StackOperand(Variable* var);

  // An operand used to read/write a known (PARAMETER, LOCAL, or CONTEXT)
  // variable.  May emit code to traverse the context chain, loading the
  // found context into the scratch register.  Writing to this operand will
  // need the write barrier if the location is CONTEXT.
  MemOperand VarOperand(Variable* var, Register scratch);

  void VisitForEffect(Expression* expr) {
    if (FLAG_verify_operand_stack_depth) EmitOperandStackDepthCheck();
    EffectContext context(this);
    Visit(expr);
    PrepareForBailout(expr, BailoutState::NO_REGISTERS);
  }

  void VisitForAccumulatorValue(Expression* expr) {
    if (FLAG_verify_operand_stack_depth) EmitOperandStackDepthCheck();
    AccumulatorValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, BailoutState::TOS_REGISTER);
  }

  void VisitForStackValue(Expression* expr) {
    if (FLAG_verify_operand_stack_depth) EmitOperandStackDepthCheck();
    StackValueContext context(this);
    Visit(expr);
    PrepareForBailout(expr, BailoutState::NO_REGISTERS);
  }

  void VisitForControl(Expression* expr,
                       Label* if_true,
                       Label* if_false,
                       Label* fall_through) {
    if (FLAG_verify_operand_stack_depth) EmitOperandStackDepthCheck();
    TestContext context(this, expr, if_true, if_false, fall_through);
    Visit(expr);
    // For test contexts, we prepare for bailout before branching, not at
    // the end of the entire expression.  This happens as part of visiting
    // the expression.
  }
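
  // A sketch of how a statement visitor typically drives these contexts
  // (an assumption modeled on the if-statement pattern; the real code is
  // in the Visit* implementations, and branch mnemonics vary per platform):
  //
  //   Label then_part, else_part, done;
  //   VisitForControl(stmt->condition(), &then_part, &else_part, &then_part);
  //   __ bind(&then_part);
  //   Visit(stmt->then_statement());
  //   __ jmp(&done);
  //   __ bind(&else_part);
  //   Visit(stmt->else_statement());
  //   __ bind(&done);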

  void VisitInDuplicateContext(Expression* expr);

  void VisitDeclarations(Declaration::List* declarations);
  void DeclareGlobals(Handle<FixedArray> pairs);
  int DeclareGlobalsFlags();

  // Push, pop or drop values onto/from the operand stack.
  void PushOperand(Register reg);
  void PopOperand(Register reg);
  void DropOperands(int count);

  // Convenience helpers for pushing onto the operand stack.
  void PushOperand(MemOperand operand);
  void PushOperand(Handle<Object> handle);
  void PushOperand(Smi* smi);

  // Convenience helpers for pushing/popping multiple operands.
  void PushOperands(Register reg1, Register reg2);
  void PushOperands(Register reg1, Register reg2, Register reg3);
  void PushOperands(Register reg1, Register reg2, Register reg3, Register reg4);
  void PopOperands(Register reg1, Register reg2);

  // Convenience helper for calling a runtime function that consumes arguments
  // from the operand stack (only usable for functions with known arity).
  void CallRuntimeWithOperands(Runtime::FunctionId function_id);

  // Static tracking of the operand stack depth.
  void OperandStackDepthDecrement(int count);
  void OperandStackDepthIncrement(int count);

  // Generate debug code that verifies that our static tracking of the operand
  // stack depth is in sync with the actual operand stack during runtime.
  void EmitOperandStackDepthCheck();

  // Generate code to create an iterator result object.  The "value" property
  // is set to a value popped from the stack, and "done" is set according to
  // the argument.  The result object is left in the result register.
  void EmitCreateIteratorResult(bool done);
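
  // For example (illustrative): EmitCreateIteratorResult(true) with 42 on
  // top of the operand stack leaves an object equivalent to
  // { value: 42, done: true } in the result register.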

  // Try to perform a comparison as a fast inlined literal compare if
  // the operands allow it.  Returns true if the compare operation
  // has been matched and all code generated; false otherwise.
  bool TryLiteralCompare(CompareOperation* compare);

  // Platform-specific code for comparing the type of a value with
  // a given literal string.
  void EmitLiteralCompareTypeof(Expression* expr,
                                Expression* sub_expr,
                                Handle<String> check);

  // Platform-specific code for equality comparison with a nil-like value.
  void EmitLiteralCompareNil(CompareOperation* expr,
                             Expression* sub_expr,
                             NilValue nil);

  // Bailout support.
  void PrepareForBailout(Expression* node, Deoptimizer::BailoutState state);
  void PrepareForBailoutForId(BailoutId id, Deoptimizer::BailoutState state);

  // Returns an int32 for the index into the FixedArray that backs the
  // feedback vector.
  int32_t IntFromSlot(FeedbackSlot slot) const {
    return FeedbackVector::GetIndex(slot);
  }

  // Returns a smi for the index into the FixedArray that backs the feedback
  // vector.
  Smi* SmiFromSlot(FeedbackSlot slot) const {
    return Smi::FromInt(IntFromSlot(slot));
  }

  // Record a call's return site offset, used to rebuild the frame if the
  // called function was inlined at the site.
  void RecordJSReturnSite(Call* call);

  // Prepare for bailout before a test (or compare) and branch.  If
  // should_normalize, then the following comparison will not handle the
  // canonical JS true value, so we will insert a (dead) test against true at
  // the actual bailout target from the optimized code.  If not
  // should_normalize, the true and false labels are ignored.
  void PrepareForBailoutBeforeSplit(Expression* expr,
                                    bool should_normalize,
                                    Label* if_true,
                                    Label* if_false);

  // If enabled, emit debug code for checking that the current context is
  // neither a with nor a catch context.
  void EmitDebugCheckDeclarationContext(Variable* variable);

  // This is meant to be called at loop back edges; |back_edge_target| is
  // the jump target of the back edge and is used to approximate the amount
  // of code inside the loop.
  void EmitBackEdgeBookkeeping(IterationStatement* stmt,
                               Label* back_edge_target);
  // Record the OSR AST id corresponding to a back edge in the code.
  void RecordBackEdge(BailoutId osr_ast_id);
  // Emit a table of back edge ids, pcs and loop depths into the code stream.
  // Return the offset of the start of the table.
  unsigned EmitBackEdgeTable();

  void EmitProfilingCounterDecrement(int delta);
  void EmitProfilingCounterReset();

  // Emit code to pop values from the stack associated with nested statements
  // like try/catch, try/finally, etc., running the finally blocks and
  // unwinding the handlers as needed.  Also emits the return sequence if
  // necessary (i.e., if the return is not delayed by a finally block).
  void EmitUnwindAndReturn();

  // Platform-specific return sequence.
  void EmitReturnSequence();
  void EmitProfilingCounterHandlingForReturnSequence(bool is_tail_call);

  // Platform-specific code sequences for calls.
  void EmitCall(Call* expr, ConvertReceiverMode = ConvertReceiverMode::kAny);
  void EmitCallWithLoadIC(Call* expr);
  void EmitKeyedCallWithLoadIC(Call* expr, Expression* key);

#define FOR_EACH_FULL_CODE_INTRINSIC(F) \
  F(IsSmi)                              \
  F(IsArray)                            \
  F(IsTypedArray)                       \
  F(IsJSProxy)                          \
  F(Call)                               \
  F(IsJSReceiver)                       \
  F(GetSuperConstructor)                \
  F(DebugBreakInOptimizedCode)          \
  F(ClassOf)                            \
  F(StringCharCodeAt)                   \
  F(SubString)                          \
  F(ToInteger)                          \
  F(ToString)                           \
  F(ToLength)                           \
  F(ToNumber)                           \
  F(ToObject)                           \
  F(DebugIsActive)                      \
  F(CreateIterResultObject)

#define GENERATOR_DECLARATION(Name) void Emit##Name(CallRuntime* call);
  FOR_EACH_FULL_CODE_INTRINSIC(GENERATOR_DECLARATION)
#undef GENERATOR_DECLARATION
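
  // The expansion above declares one emitter per intrinsic, e.g.:
  //
  //   void EmitIsSmi(CallRuntime* call);
  //   void EmitIsArray(CallRuntime* call);
  //   // ...and so on for each entry in FOR_EACH_FULL_CODE_INTRINSIC.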

  void EmitIntrinsicAsStubCall(CallRuntime* expr, const Callable& callable);

  // Emits call to respective code stub.
  void EmitHasProperty();

  // Platform-specific code for restoring context from current JS frame.
  void RestoreContext();

  // Platform-specific code for loading variables.
  void EmitGlobalVariableLoad(VariableProxy* proxy, TypeofMode typeof_mode);
  void EmitVariableLoad(VariableProxy* proxy,
                        TypeofMode typeof_mode = NOT_INSIDE_TYPEOF);

  void EmitAccessor(ObjectLiteralProperty* property);

  // Platform-specific support for allocating a new closure based on
  // the given function info.
  void EmitNewClosure(Handle<SharedFunctionInfo> info, FeedbackSlot slot,
                      bool pretenure);

  // Re-usable portions of CallRuntime.
  void EmitLoadJSRuntimeFunction(CallRuntime* expr);
  void EmitCallJSRuntimeFunction(CallRuntime* expr);

  // Load a value from a named property.
  // The receiver is left on the stack by the IC.
  void EmitNamedPropertyLoad(Property* expr);

  // Load a value from a keyed property.
  // The receiver and the key are left on the stack by the IC.
  void EmitKeyedPropertyLoad(Property* expr);

  // Apply the compound assignment operator. Expects the left operand on top
  // of the stack and the right one in the accumulator.
  void EmitBinaryOp(BinaryOperation* expr, Token::Value op);

  // Helper functions for generating inlined smi code for certain
  // binary operations.
  void EmitInlineSmiBinaryOp(BinaryOperation* expr,
                             Token::Value op,
                             Expression* left,
                             Expression* right);

  // Assign to the given expression as if via '='. The right-hand-side value
  // is expected in the accumulator. slot is only used if FLAG_vector_stores
  // is true.
  void EmitAssignment(Expression* expr, FeedbackSlot slot);

  // Complete a variable assignment.  The right-hand-side value is expected
  // in the accumulator.
  void EmitVariableAssignment(Variable* var, Token::Value op, FeedbackSlot slot,
                              HoleCheckMode hole_check_mode);

  // Helper functions to EmitVariableAssignment.
  void EmitStoreToStackLocalOrContextSlot(Variable* var,
                                          MemOperand location);

  // Complete a named property assignment.  The receiver is expected on top
  // of the stack and the right-hand-side value in the accumulator.
  void EmitNamedPropertyAssignment(Assignment* expr);

  // Complete a keyed property assignment.  The receiver and key are
  // expected on top of the stack and the right-hand-side value in the
  // accumulator.
  void EmitKeyedPropertyAssignment(Assignment* expr);

  static bool NeedsHomeObject(Expression* expr) {
    return FunctionLiteral::NeedsHomeObject(expr);
  }

  // Adds the [[HomeObject]] to |initializer| if it is a FunctionLiteral.
  // The value of the initializer is expected to be at the top of the stack.
  // |offset| is the offset in the stack where the home object can be found.
  void EmitSetHomeObject(Expression* initializer, int offset,
                         FeedbackSlot slot);

  void EmitSetHomeObjectAccumulator(Expression* initializer, int offset,
                                    FeedbackSlot slot);

  // Platform-specific code for loading a slot to a register.
  void EmitLoadSlot(Register destination, FeedbackSlot slot);
  // Platform-specific code for pushing a slot to the stack.
  void EmitPushSlot(FeedbackSlot slot);

  void CallIC(Handle<Code> code,
              TypeFeedbackId id = TypeFeedbackId::None());

  void CallLoadIC(FeedbackSlot slot, Handle<Object> name);
  void CallStoreIC(FeedbackSlot slot, Handle<Object> name,
                   bool store_own_property = false);
  void CallKeyedStoreIC(FeedbackSlot slot);

  void SetFunctionPosition(FunctionLiteral* fun);
  void SetReturnPosition(FunctionLiteral* fun);

  enum InsertBreak { INSERT_BREAK, SKIP_BREAK };

  // During stepping we want to be able to break at each statement, but not at
  // every (sub-)expression. That is why by default we insert breaks at every
  // statement position, but not at every expression position, unless stated
  // otherwise.
  void SetStatementPosition(Statement* stmt,
                            InsertBreak insert_break = INSERT_BREAK);
  void SetExpressionPosition(Expression* expr);

  // Consider an expression a statement. As such, we also insert a break.
  // This is used in loop headers where we want to break for each iteration.
  void SetExpressionAsStatementPosition(Expression* expr);

  void SetCallPosition(Expression* expr,
                       TailCallMode tail_call_mode = TailCallMode::kDisallow);

  void SetConstructCallPosition(Expression* expr) {
    // Currently call and construct calls are treated the same wrt debugging.
    SetCallPosition(expr);
  }

  void RecordStatementPosition(int pos);
  void RecordPosition(int pos);

  // Local control flow support.
  void EmitContinue(Statement* target);
  void EmitBreak(Statement* target);

  // Loop nesting counter.
  int loop_depth() { return loop_depth_; }
  void increment_loop_depth() { loop_depth_++; }
  void decrement_loop_depth() {
    DCHECK(loop_depth_ > 0);
    loop_depth_--;
  }

  MacroAssembler* masm() const { return masm_; }

  class ExpressionContext;
  const ExpressionContext* context() { return context_; }
  void set_new_context(const ExpressionContext* context) { context_ = context; }

  Isolate* isolate() const { return isolate_; }
  Zone* zone() const { return zone_; }
  Handle<Script> script();
  LanguageMode language_mode();
  bool has_simple_parameters();
  FunctionLiteral* literal() const;
  const FeedbackVectorSpec* feedback_vector_spec() const;
  Scope* scope() { return scope_; }

  static Register context_register();

  // Get fields from the stack frame. Offsets are the frame pointer relative
  // offsets defined in, e.g., StandardFrameConstants.
  void LoadFromFrameField(int frame_offset, Register value);
  // Set fields in the stack frame. Offsets are the frame pointer relative
  // offsets defined in, e.g., StandardFrameConstants.
  void StoreToFrameField(int frame_offset, Register value);

  // Load a value from the current context. Indices are defined as an enum
  // in v8::internal::Context.
  void LoadContextField(Register dst, int context_index);

  // Push the function argument for the runtime functions PushWithContext
  // and PushCatchContext.
  void PushFunctionArgumentForContextAllocation();

  // AST node visit functions.
#define DECLARE_VISIT(type) void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
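
  // The expansion above declares one visit function per AST node type in
  // AST_NODE_LIST, e.g.:
  //
  //   void VisitBlock(Block* node);
  //   void VisitIfStatement(IfStatement* node);
  //   // ...and so on.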

  void VisitComma(BinaryOperation* expr);
  void VisitLogicalExpression(BinaryOperation* expr);
  void VisitArithmeticExpression(BinaryOperation* expr);

  void VisitForTypeofValue(Expression* expr);

  void Generate();
  void PopulateDeoptimizationData(Handle<Code> code);
  void PopulateTypeFeedbackInfo(Handle<Code> code);

  bool MustCreateObjectLiteralWithRuntime(ObjectLiteral* expr) const;
  bool MustCreateArrayLiteralWithRuntime(ArrayLiteral* expr) const;

  struct BailoutEntry {
    BailoutId id;
    unsigned pc_and_state;
  };

  struct BackEdgeEntry {
    BailoutId id;
    unsigned pc;
    uint32_t loop_depth;
  };

  class ExpressionContext BASE_EMBEDDED {
   public:
    explicit ExpressionContext(FullCodeGenerator* codegen)
        : masm_(codegen->masm()), old_(codegen->context()), codegen_(codegen) {
      codegen->set_new_context(this);
    }

    virtual ~ExpressionContext() {
      codegen_->set_new_context(old_);
    }

    Isolate* isolate() const { return codegen_->isolate(); }

    // Convert constant control flow (true or false) to the result expected for
    // this expression context.
    virtual void Plug(bool flag) const = 0;

    // Emit code to convert a pure value (in a register, known variable
    // location, as a literal, or on top of the stack) into the result
    // expected according to this expression context.
    virtual void Plug(Register reg) const = 0;
    virtual void Plug(Variable* var) const = 0;
    virtual void Plug(Handle<Object> lit) const = 0;
    virtual void Plug(Heap::RootListIndex index) const = 0;
    virtual void PlugTOS() const = 0;

    // Emit code to convert pure control flow (a pair of unbound labels) into
    // the result expected according to this expression context.  The
    // implementation will bind both labels unless it's a TestContext, which
    // won't bind them at this point.
    virtual void Plug(Label* materialize_true,
                      Label* materialize_false) const = 0;

    // Emit code to discard count elements from the top of stack, then convert
    // a pure value into the result expected according to this expression
    // context.
    virtual void DropAndPlug(int count, Register reg) const = 0;

    // Set up branch labels for a test expression.  The three Label** parameters
    // are output parameters.
    virtual void PrepareTest(Label* materialize_true,
                             Label* materialize_false,
                             Label** if_true,
                             Label** if_false,
                             Label** fall_through) const = 0;

    // Returns true if we are evaluating only for side effects (i.e. if the
    // result will be discarded).
    virtual bool IsEffect() const { return false; }

    // Returns true if we are evaluating for the value (in the accumulator or
    // on the stack).
    virtual bool IsAccumulatorValue() const { return false; }
    virtual bool IsStackValue() const { return false; }

    // Returns true if we are branching on the value rather than materializing
    // it.  Only used for asserts.
    virtual bool IsTest() const { return false; }

   protected:
    FullCodeGenerator* codegen() const { return codegen_; }
    MacroAssembler* masm() const { return masm_; }
    MacroAssembler* masm_;

   private:
    const ExpressionContext* old_;
    FullCodeGenerator* codegen_;
  };

  class AccumulatorValueContext : public ExpressionContext {
   public:
    explicit AccumulatorValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsAccumulatorValue() const override { return true; }
  };

  class StackValueContext : public ExpressionContext {
   public:
    explicit StackValueContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsStackValue() const override { return true; }
  };

  class TestContext : public ExpressionContext {
   public:
    TestContext(FullCodeGenerator* codegen,
                Expression* condition,
                Label* true_label,
                Label* false_label,
                Label* fall_through)
        : ExpressionContext(codegen),
          condition_(condition),
          true_label_(true_label),
          false_label_(false_label),
          fall_through_(fall_through) { }

    static const TestContext* cast(const ExpressionContext* context) {
      DCHECK(context->IsTest());
      return reinterpret_cast<const TestContext*>(context);
    }

    Expression* condition() const { return condition_; }
    Label* true_label() const { return true_label_; }
    Label* false_label() const { return false_label_; }
    Label* fall_through() const { return fall_through_; }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsTest() const override { return true; }

   private:
    Expression* condition_;
    Label* true_label_;
    Label* false_label_;
    Label* fall_through_;
  };

  class EffectContext : public ExpressionContext {
   public:
    explicit EffectContext(FullCodeGenerator* codegen)
        : ExpressionContext(codegen) { }

    void Plug(bool flag) const override;
    void Plug(Register reg) const override;
    void Plug(Label* materialize_true, Label* materialize_false) const override;
    void Plug(Variable* var) const override;
    void Plug(Handle<Object> lit) const override;
    void Plug(Heap::RootListIndex) const override;
    void PlugTOS() const override;
    void DropAndPlug(int count, Register reg) const override;
    void PrepareTest(Label* materialize_true, Label* materialize_false,
                     Label** if_true, Label** if_false,
                     Label** fall_through) const override;
    bool IsEffect() const override { return true; }
  };

  class EnterBlockScopeIfNeeded {
   public:
    EnterBlockScopeIfNeeded(FullCodeGenerator* codegen, Scope* scope,
                            BailoutId entry_id, BailoutId declarations_id,
                            BailoutId exit_id);
    ~EnterBlockScopeIfNeeded();

   private:
    MacroAssembler* masm() const { return codegen_->masm(); }

    FullCodeGenerator* codegen_;
    Scope* saved_scope_;
    BailoutId exit_id_;
    bool needs_block_context_;
  };

  MacroAssembler* masm_;
  CompilationInfo* info_;
  Isolate* isolate_;
  Zone* zone_;
  Scope* scope_;
  Label return_label_;
  NestedStatement* nesting_stack_;
  int loop_depth_;
  int operand_stack_depth_;
  ZoneList<Handle<Object> >* globals_;
  const ExpressionContext* context_;
  ZoneList<BailoutEntry> bailout_entries_;
  ZoneList<BackEdgeEntry> back_edges_;
  SourcePositionTableBuilder source_position_table_builder_;
  int ic_total_count_;
  Handle<Cell> profiling_counter_;

  friend class NestedStatement;

  DEFINE_AST_VISITOR_SUBCLASS_MEMBERS();
  DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
};


class BackEdgeTable {
 public:
  BackEdgeTable(Code* code, DisallowHeapAllocation* required) {
    DCHECK(code->kind() == Code::FUNCTION);
    instruction_start_ = code->instruction_start();
    Address table_address = instruction_start_ + code->back_edge_table_offset();
    length_ = Memory::uint32_at(table_address);
    start_ = table_address + kTableLengthSize;
  }

  uint32_t length() { return length_; }

  BailoutId ast_id(uint32_t index) {
    return BailoutId(static_cast<int>(
        Memory::uint32_at(entry_at(index) + kAstIdOffset)));
  }

  uint32_t loop_depth(uint32_t index) {
    return Memory::uint32_at(entry_at(index) + kLoopDepthOffset);
  }

  uint32_t pc_offset(uint32_t index) {
    return Memory::uint32_at(entry_at(index) + kPcOffsetOffset);
  }

  Address pc(uint32_t index) {
    return instruction_start_ + pc_offset(index);
  }

  enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT };

  // Increase the allowed loop nesting level by one and patch the back edges
  // of matching loops.
  static void Patch(Isolate* isolate, Code* unoptimized_code);

  // Patch the back edge to the target state, provided the correct
  // replacement code.
  static void PatchAt(Code* unoptimized_code,
                      Address pc,
                      BackEdgeState target_state,
                      Code* replacement_code);

  // Change all patched back edges back to normal interrupts.
  static void Revert(Isolate* isolate,
                     Code* unoptimized_code);

  // Return the current patch state of the back edge.
  static BackEdgeState GetBackEdgeState(Isolate* isolate,
                                        Code* unoptimized_code,
                                        Address pc_after);

#ifdef DEBUG
  // Verify that all back edges of a certain loop depth are patched.
  static bool Verify(Isolate* isolate, Code* unoptimized_code);
#endif  // DEBUG

 private:
  Address entry_at(uint32_t index) {
    DCHECK(index < length_);
    return start_ + index * kEntrySize;
  }

  static const int kTableLengthSize = kIntSize;
  static const int kAstIdOffset = 0 * kIntSize;
  static const int kPcOffsetOffset = 1 * kIntSize;
  static const int kLoopDepthOffset = 2 * kIntSize;
  static const int kEntrySize = 3 * kIntSize;
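
  // Resulting in-memory layout of the table (a descriptive sketch derived
  // from the constants above; every field is kIntSize bytes wide):
  //
  //   [ length | ast id 0 | pc offset 0 | loop depth 0
  //            | ast id 1 | pc offset 1 | loop depth 1 | ... ]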

  Address start_;
  Address instruction_start_;
  uint32_t length_;
};
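
// A minimal usage sketch (an assumption based on the constructor contract
// above; the real iteration lives in Patch/Revert, and loop_nesting_level
// here is a hypothetical threshold):
//
//   DisallowHeapAllocation no_gc;
//   BackEdgeTable back_edges(unoptimized_code, &no_gc);
//   for (uint32_t i = 0; i < back_edges.length(); i++) {
//     if (back_edges.loop_depth(i) <= loop_nesting_level) {
//       // ...inspect or patch the edge at back_edges.pc(i)...
//     }
//   }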


}  // namespace internal
}  // namespace v8

#endif  // V8_FULL_CODEGEN_FULL_CODEGEN_H_