codegen-x64.h revision 791712a13f1814dd3ab5d1a5ab8ff5dbc476f6d6
1// Copyright 2010 the V8 project authors. All rights reserved.
2// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6//     * Redistributions of source code must retain the above copyright
7//       notice, this list of conditions and the following disclaimer.
8//     * Redistributions in binary form must reproduce the above
9//       copyright notice, this list of conditions and the following
10//       disclaimer in the documentation and/or other materials provided
11//       with the distribution.
12//     * Neither the name of Google Inc. nor the names of its
13//       contributors may be used to endorse or promote products derived
14//       from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_X64_CODEGEN_X64_H_
29#define V8_X64_CODEGEN_X64_H_
30
31#include "ast.h"
32#include "ic-inl.h"
33#include "jump-target-heavy.h"
34
35namespace v8 {
36namespace internal {
37
38// Forward declarations
39class CompilationInfo;
40class DeferredCode;
41class RegisterAllocator;
42class RegisterFile;
43
44enum InitState { CONST_INIT, NOT_CONST_INIT };
45enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
46
47
48// -------------------------------------------------------------------------
49// Reference support
50
51// A reference is a C++ stack-allocated object that puts a
52// reference on the virtual frame.  The reference may be consumed
53// by GetValue, TakeValue, SetValue, and CodeGenerator::UnloadReference.
54// When the lifetime (scope) of a valid reference ends, it must have
55// been consumed, and be in state UNLOADED.
56class Reference BASE_EMBEDDED {
57 public:
58  // The values of the types are important; see size().
59  enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };
60
61  Reference(CodeGenerator* cgen,
62            Expression* expression,
63            bool persist_after_get = false);
64  ~Reference();
65
66  Expression* expression() const { return expression_; }
67  Type type() const { return type_; }
68  void set_type(Type value) {
69    ASSERT_EQ(ILLEGAL, type_);
70    type_ = value;
71  }
72
73  void set_unloaded() {
74    ASSERT_NE(ILLEGAL, type_);
75    ASSERT_NE(UNLOADED, type_);
76    type_ = UNLOADED;
77  }
78  // The size the reference takes up on the stack.
79  int size() const {
80    return (type_ < SLOT) ? 0 : type_;
81  }
82
83  bool is_illegal() const { return type_ == ILLEGAL; }
84  bool is_slot() const { return type_ == SLOT; }
85  bool is_property() const { return type_ == NAMED || type_ == KEYED; }
86  bool is_unloaded() const { return type_ == UNLOADED; }
87
88  // Return the name.  Only valid for named property references.
89  Handle<String> GetName();
90
91  // Generate code to push the value of the reference on top of the
92  // expression stack.  The reference is expected to be already on top of
93  // the expression stack, and it is consumed by the call unless the
94  // reference is for a compound assignment.
95  // If the reference is not consumed, it is left in place under its value.
96  void GetValue();
97
98  // Like GetValue except that the slot is expected to be written to before
99  // being read from again.  The value of the reference may be invalidated,
100  // causing subsequent attempts to read it to fail.
101  void TakeValue();
102
103  // Generate code to store the value on top of the expression stack in the
104  // reference.  The reference is expected to be immediately below the value
105  // on the expression stack.  The value is stored in the location specified
106  // by the reference, and is left on top of the stack, after the reference
107  // is popped from beneath it (unloaded).
108  void SetValue(InitState init_state);
109
110 private:
111  CodeGenerator* cgen_;
112  Expression* expression_;
113  Type type_;
114  bool persist_after_get_;
115};
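
// Illustrative usage only (not part of the original header): a typical
// read-modify-write sequence in the code generator looks roughly like the
// sketch below; the real call sites live in codegen-x64.cc, and the
// expression accessor used here is made up.
//
//   Reference ref(this, node->target());  // puts the reference on the frame
//   ref.GetValue();                        // value is now on top of the frame
//   /* ...compute the new value on top of the frame... */
//   ref.SetValue(NOT_CONST_INIT);          // stores and unloads the reference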
116
117
118// -------------------------------------------------------------------------
119// Control destinations.
120
121// A control destination encapsulates a pair of jump targets and a
122// flag indicating which one is the preferred fall-through.  The
123// preferred fall-through must be unbound, the other may be already
124// bound (ie, a backward target).
125//
126// The true and false targets may be jumped to unconditionally or
127// control may split conditionally.  Unconditional jumping and
128// splitting should be emitted in tail position (as the last thing
129// when compiling an expression) because they can cause either label
130// to be bound or the non-fall-through to be jumped to, leaving an
131// invalid virtual frame.
132//
133// The labels in the control destination can be extracted and
134// manipulated normally without affecting the state of the
135// destination.
136
137class ControlDestination BASE_EMBEDDED {
138 public:
139  ControlDestination(JumpTarget* true_target,
140                     JumpTarget* false_target,
141                     bool true_is_fall_through)
142      : true_target_(true_target),
143        false_target_(false_target),
144        true_is_fall_through_(true_is_fall_through),
145        is_used_(false) {
146    ASSERT(true_is_fall_through ? !true_target->is_bound()
147                                : !false_target->is_bound());
148  }
149
150  // Accessors for the jump targets.  Directly jumping or branching to
151  // or binding the targets will not update the destination's state.
152  JumpTarget* true_target() const { return true_target_; }
153  JumpTarget* false_target() const { return false_target_; }
154
155  // True if the destination has been jumped to unconditionally or
156  // control has been split to both targets.  This predicate does not
157  // test whether the targets have been extracted and manipulated as
158  // raw jump targets.
159  bool is_used() const { return is_used_; }
160
161  // True if the destination is used and the true target (respectively
162  // false target) was the fall through.  If the target is backward,
163  // "fall through" included jumping unconditionally to it.
164  bool true_was_fall_through() const {
165    return is_used_ && true_is_fall_through_;
166  }
167
168  bool false_was_fall_through() const {
169    return is_used_ && !true_is_fall_through_;
170  }
171
172  // Emit a branch to one of the true or false targets, and bind the
173  // other target.  Because this binds the fall-through target, it
174  // should be emitted in tail position (as the last thing when
175  // compiling an expression).
176  void Split(Condition cc) {
177    ASSERT(!is_used_);
178    if (true_is_fall_through_) {
179      false_target_->Branch(NegateCondition(cc));
180      true_target_->Bind();
181    } else {
182      true_target_->Branch(cc);
183      false_target_->Bind();
184    }
185    is_used_ = true;
186  }
187
188  // Emit an unconditional jump in tail position, to the true target
189  // (if the argument is true) or the false target.  The "jump" will
190  // actually bind the jump target if it is forward, or jump to it if it
191  // is backward.
192  void Goto(bool where) {
193    ASSERT(!is_used_);
194    JumpTarget* target = where ? true_target_ : false_target_;
195    if (target->is_bound()) {
196      target->Jump();
197    } else {
198      target->Bind();
199    }
200    is_used_ = true;
201    true_is_fall_through_ = where;
202  }
203
204  // Mark this control destination as used as if Goto had been called, but
205  // without generating a jump or binding a label (the control effect
206  // should have already happened).  This is used when the left
207  // subexpression of a short-circuit boolean operator is
208  // compiled.
209  void Use(bool where) {
210    ASSERT(!is_used_);
211    ASSERT((where ? true_target_ : false_target_)->is_bound());
212    is_used_ = true;
213    true_is_fall_through_ = where;
214  }
215
216  // Swap the true and false targets but keep the same actual label as
217  // the fall through.  This is used when compiling negated
218  // expressions, where we want to swap the targets but preserve the
219  // state.
220  void Invert() {
221    JumpTarget* temp_target = true_target_;
222    true_target_ = false_target_;
223    false_target_ = temp_target;
224
225    true_is_fall_through_ = !true_is_fall_through_;
226  }
227
228 private:
229  // True and false jump targets.
230  JumpTarget* true_target_;
231  JumpTarget* false_target_;
232
233  // Before using the destination: true if the true target is the
234  // preferred fall through, false if the false target is.  After
235  // using the destination: true if the true target was actually used
236  // as the fall through, false if the false target was.
237  bool true_is_fall_through_;
238
239  // True if the Split or Goto functions have been called.
240  bool is_used_;
241};
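
// Illustrative usage only (not part of the original header): compiling a
// condition typically allocates two jump targets, wraps them in a control
// destination, and lets the comparison code split control flow; the local
// names below are made up.
//
//   JumpTarget then_target;
//   JumpTarget else_target;
//   ControlDestination dest(&then_target, &else_target, true);
//   LoadCondition(node->condition(), &dest, true);  // may call dest.Split(cc)
//   // Afterwards, dest.true_was_fall_through() tells which label was bound.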
242
243
244// -------------------------------------------------------------------------
245// Code generation state
246
247// The state is passed down the AST by the code generator (and back up, in
248// the form of the state of the jump target pair).  It is threaded through
249// the call stack.  Constructing a state implicitly pushes it on the owning
250// code generator's stack of states, and destroying one implicitly pops it.
251//
252// The code generator state is only used for expressions, so statements have
253// the initial state.
254
255class CodeGenState BASE_EMBEDDED {
256 public:
257  // Create an initial code generator state.  Destroying the initial state
258  // leaves the code generator with a NULL state.
259  explicit CodeGenState(CodeGenerator* owner);
260
261  // Create a code generator state based on a code generator's current
262  // state.  The new state has its own control destination.
263  CodeGenState(CodeGenerator* owner, ControlDestination* destination);
264
265  // Destroy a code generator state and restore the owning code generator's
266  // previous state.
267  ~CodeGenState();
268
269  // Accessors for the state.
270  ControlDestination* destination() const { return destination_; }
271
272 private:
273  // The owning code generator.
274  CodeGenerator* owner_;
275
276  // A control destination in case the expression has a control-flow
277  // effect.
278  ControlDestination* destination_;
279
280  // The previous state of the owning code generator, restored when
281  // this state is destroyed.
282  CodeGenState* previous_;
283};
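
// Illustrative sketch (not part of the original header): states nest with the
// C++ scope of the visiting code, e.g. inside a visitor one might write
//
//   { CodeGenState new_state(this, &dest);  // implicitly pushed on the owner
//     Visit(expr);                          // expr sees dest via state()
//   }                                       // destructor restores the old state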
284
285
286// -------------------------------------------------------------------------
287// Arguments allocation mode
288
289enum ArgumentsAllocationMode {
290  NO_ARGUMENTS_ALLOCATION,
291  EAGER_ARGUMENTS_ALLOCATION,
292  LAZY_ARGUMENTS_ALLOCATION
293};
294
295
296// -------------------------------------------------------------------------
297// CodeGenerator
298
299class CodeGenerator: public AstVisitor {
300 public:
301  // Takes a function literal and generates code for it.  This function
302  // should only be called by compiler.cc.
303  static Handle<Code> MakeCode(CompilationInfo* info);
304
305  // Printing of AST, etc. as requested by flags.
306  static void MakeCodePrologue(CompilationInfo* info);
307
308  // Allocate and install the code.
309  static Handle<Code> MakeCodeEpilogue(MacroAssembler* masm,
310                                       Code::Flags flags,
311                                       CompilationInfo* info);
312
313#ifdef ENABLE_LOGGING_AND_PROFILING
314  static bool ShouldGenerateLog(Expression* type);
315#endif
316
317  static bool RecordPositions(MacroAssembler* masm,
318                              int pos,
319                              bool right_here = false);
320
321  // Accessors
322  MacroAssembler* masm() { return masm_; }
323  VirtualFrame* frame() const { return frame_; }
324  inline Handle<Script> script();
325
326  bool has_valid_frame() const { return frame_ != NULL; }
327
328  // Set the virtual frame to be new_frame, with non-frame register
329  // reference counts given by non_frame_registers.  The non-frame
330  // register reference counts of the old frame are returned in
331  // non_frame_registers.
332  void SetFrame(VirtualFrame* new_frame, RegisterFile* non_frame_registers);
333
334  void DeleteFrame();
335
336  RegisterAllocator* allocator() const { return allocator_; }
337
338  CodeGenState* state() { return state_; }
339  void set_state(CodeGenState* state) { state_ = state; }
340
341  void AddDeferred(DeferredCode* code) { deferred_.Add(code); }
342
343  bool in_spilled_code() const { return in_spilled_code_; }
344  void set_in_spilled_code(bool flag) { in_spilled_code_ = flag; }
345
346  // If the name is an inline runtime function call, return the number of
347  // expected arguments. Otherwise return -1.
348  static int InlineRuntimeCallArgumentsCount(Handle<String> name);
349
350  static Operand ContextOperand(Register context, int index) {
351    return Operand(context, Context::SlotOffset(index));
352  }
353
354 private:
355  // Construction/Destruction
356  explicit CodeGenerator(MacroAssembler* masm);
357
358  // Accessors
359  inline bool is_eval();
360  inline Scope* scope();
361
362  // Generating deferred code.
363  void ProcessDeferred();
364
365  // State
366  ControlDestination* destination() const { return state_->destination(); }
367
368  // Track loop nesting level.
369  int loop_nesting() const { return loop_nesting_; }
370  void IncrementLoopNesting() { loop_nesting_++; }
371  void DecrementLoopNesting() { loop_nesting_--; }
372
373
374  // Node visitors.
375  void VisitStatements(ZoneList<Statement*>* statements);
376
377#define DEF_VISIT(type) \
378  void Visit##type(type* node);
379  AST_NODE_LIST(DEF_VISIT)
380#undef DEF_VISIT
381
382  // Visit a statement and then spill the virtual frame if control flow can
383  // reach the end of the statement (ie, it does not exit via break,
384  // continue, return, or throw).  This function is used temporarily while
385  // the code generator is being transformed.
386  void VisitAndSpill(Statement* statement);
387
388  // Visit a list of statements and then spill the virtual frame if control
389  // flow can reach the end of the list.
390  void VisitStatementsAndSpill(ZoneList<Statement*>* statements);
391
392  // Main code generation function
393  void Generate(CompilationInfo* info);
394
395  // Generate the return sequence code.  Should be called no more than
396  // once per compiled function, immediately after binding the return
397  // target (which cannot be done more than once).
398  void GenerateReturnSequence(Result* return_value);
399
400  // Generate code for a fast smi loop.
401  void GenerateFastSmiLoop(ForStatement* node);
402
403  // Returns the arguments allocation mode.
404  ArgumentsAllocationMode ArgumentsMode();
405
406  // Store the arguments object and allocate it if necessary.
407  Result StoreArgumentsObject(bool initial);
408
409  // The following are used by class Reference.
410  void LoadReference(Reference* ref);
411  void UnloadReference(Reference* ref);
412
413  Operand SlotOperand(Slot* slot, Register tmp);
414
415  Operand ContextSlotOperandCheckExtensions(Slot* slot,
416                                            Result tmp,
417                                            JumpTarget* slow);
418
419  // Expressions
420  static Operand GlobalObject() {
421    return ContextOperand(rsi, Context::GLOBAL_INDEX);
422  }
423
424  void LoadCondition(Expression* x,
425                     ControlDestination* destination,
426                     bool force_control);
427  void Load(Expression* expr);
428  void LoadGlobal();
429  void LoadGlobalReceiver();
430
431  // Generate code to push the value of an expression on top of the frame
432  // and then spill the frame fully to memory.  This function is used
433  // temporarily while the code generator is being transformed.
434  void LoadAndSpill(Expression* expression);
435
436  // Read a value from a slot and leave it on top of the expression stack.
437  void LoadFromSlot(Slot* slot, TypeofState typeof_state);
438  void LoadFromSlotCheckForArguments(Slot* slot, TypeofState state);
439  Result LoadFromGlobalSlotCheckExtensions(Slot* slot,
440                                           TypeofState typeof_state,
441                                           JumpTarget* slow);
442
443  // Support for loading from local/global variables and arguments
444  // whose location is known unless they are shadowed by
445  // eval-introduced bindings. Generates no code for unsupported slot
446  // types and therefore expects to fall through to the slow jump target.
447  void EmitDynamicLoadFromSlotFastCase(Slot* slot,
448                                       TypeofState typeof_state,
449                                       Result* result,
450                                       JumpTarget* slow,
451                                       JumpTarget* done);
452
453  // Store the value on top of the expression stack into a slot, leaving the
454  // value in place.
455  void StoreToSlot(Slot* slot, InitState init_state);
456
457  // Support for compiling assignment expressions.
458  void EmitSlotAssignment(Assignment* node);
459  void EmitNamedPropertyAssignment(Assignment* node);
460  void EmitKeyedPropertyAssignment(Assignment* node);
461
462  // Receiver is passed on the frame and not consumed.
463  Result EmitNamedLoad(Handle<String> name, bool is_contextual);
464
465  // If the store is contextual, value is passed on the frame and consumed.
466  // Otherwise, receiver and value are passed on the frame and consumed.
467  Result EmitNamedStore(Handle<String> name, bool is_contextual);
468
469  // Load a property of an object, returning it in a Result.
470  // The object and the property name are passed on the stack, and
471  // not changed.
472  Result EmitKeyedLoad();
473
474  // Receiver, key, and value are passed on the frame and consumed.
475  Result EmitKeyedStore(StaticType* key_type);
476
477  // Special code for typeof expressions: Unfortunately, we must
478  // be careful when loading the expression in 'typeof'
479  // expressions. We are not allowed to throw reference errors for
480  // non-existing properties of the global object, so we must make it
481  // look like an explicit property access, instead of an access
482  // through the context chain.
483  void LoadTypeofExpression(Expression* x);
484
485  // Translate the value on top of the frame into control flow to the
486  // control destination.
487  void ToBoolean(ControlDestination* destination);
488
489  // Generate code that computes a shortcutting logical operation.
490  void GenerateLogicalBooleanOperation(BinaryOperation* node);
491
492  void GenericBinaryOperation(BinaryOperation* expr,
493                              OverwriteMode overwrite_mode);
494
495  // Emits a code sequence that jumps to a JumpTarget if the inputs
496  // are both smis.  Cannot be in MacroAssembler because it takes
497  // advantage of TypeInfo to skip unneeded checks.
498  void JumpIfBothSmiUsingTypeInfo(Result* left,
499                                  Result* right,
500                                  JumpTarget* both_smi);
501
502  // Emits a code sequence that jumps to deferred code if the input
503  // is not a smi.  Cannot be in MacroAssembler because it takes
504  // advantage of TypeInfo to skip unneeded checks.
505  void JumpIfNotSmiUsingTypeInfo(Register reg,
506                                 TypeInfo type,
507                                 DeferredCode* deferred);
508
509  // Emits a code sequence that jumps to deferred code if the inputs
510  // are not both smis.  Cannot be in MacroAssembler because it takes
511  // advantage of TypeInfo to skip unneeded checks.
512  void JumpIfNotBothSmiUsingTypeInfo(Register left,
513                                     Register right,
514                                     TypeInfo left_info,
515                                     TypeInfo right_info,
516                                     DeferredCode* deferred);
517
518  // If possible, combine two constant smi values using op to produce
519  // a smi result, and push it on the virtual frame, all at compile time.
520  // Returns true if it succeeds.  Otherwise it has no effect.
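  // For example, FoldConstantSmis(Token::ADD, 2, 3) would push the smi 5 and
  // return true; a combination it cannot fold returns false and leaves the
  // frame untouched (an illustrative reading of the contract stated above).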
521  bool FoldConstantSmis(Token::Value op, int left, int right);
522
523  // Emit code to perform a binary operation on a constant
524  // smi and a likely smi.  Consumes the Result *operand.
525  Result ConstantSmiBinaryOperation(BinaryOperation* expr,
526                                    Result* operand,
527                                    Handle<Object> constant_operand,
528                                    bool reversed,
529                                    OverwriteMode overwrite_mode);
530
531  // Emit code to perform a binary operation on two likely smis.
532  // The code to handle smi arguments is produced inline.
533  // Consumes the Results *left and *right.
534  Result LikelySmiBinaryOperation(BinaryOperation* expr,
535                                  Result* left,
536                                  Result* right,
537                                  OverwriteMode overwrite_mode);
538
539  void Comparison(AstNode* node,
540                  Condition cc,
541                  bool strict,
542                  ControlDestination* destination);
543
544  // If at least one of the sides is a constant smi, generate optimized code.
545  void ConstantSmiComparison(Condition cc,
546                             bool strict,
547                             ControlDestination* destination,
548                             Result* left_side,
549                             Result* right_side,
550                             bool left_side_constant_smi,
551                             bool right_side_constant_smi,
552                             bool is_loop_condition);
553
554  void GenerateInlineNumberComparison(Result* left_side,
555                                      Result* right_side,
556                                      Condition cc,
557                                      ControlDestination* dest);
558
559  // To prevent long attacker-controlled byte sequences, integer constants
560  // from the JavaScript source are loaded in two parts if they are larger
561  // than 16 bits.
562  static const int kMaxSmiInlinedBits = 16;
563  bool IsUnsafeSmi(Handle<Object> value);
564  // Load an integer constant x into a register target using
565  // at most 16 bits of user-controlled data per assembly operation.
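  // The idea, roughly (illustrative only; the actual instruction sequence is
  // emitted in codegen-x64.cc):
  //   load target with the upper bits of the constant,
  //   shift target left by 16, then
  //   or the low 16 bits of the constant into target,
  // so no single instruction encodes more than 16 attacker-chosen bits.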
566  void LoadUnsafeSmi(Register target, Handle<Object> value);
567
568  void CallWithArguments(ZoneList<Expression*>* arguments,
569                         CallFunctionFlags flags,
570                         int position);
571
572  // An optimized implementation of expressions of the form
573  // x.apply(y, arguments).  We call x the applicand and y the receiver.
574  // The optimization avoids allocating an arguments object if possible.
575  void CallApplyLazy(Expression* applicand,
576                     Expression* receiver,
577                     VariableProxy* arguments,
578                     int position);
579
580  void CheckStack();
581
582  struct InlineRuntimeLUT {
583    void (CodeGenerator::*method)(ZoneList<Expression*>*);
584    const char* name;
585    int nargs;
586  };
587  static InlineRuntimeLUT* FindInlineRuntimeLUT(Handle<String> name);
588  bool CheckForInlineRuntimeCall(CallRuntime* node);
589  static bool PatchInlineRuntimeEntry(Handle<String> name,
590                                      const InlineRuntimeLUT& new_entry,
591                                      InlineRuntimeLUT* old_entry);
592  void ProcessDeclarations(ZoneList<Declaration*>* declarations);
593
594  static Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop);
595
596  static Handle<Code> ComputeKeyedCallInitialize(int argc, InLoopFlag in_loop);
597
598  // Declare global variables and functions in the given array of
599  // name/value pairs.
600  void DeclareGlobals(Handle<FixedArray> pairs);
601
602  // Instantiate the function based on the shared function info.
603  void InstantiateFunction(Handle<SharedFunctionInfo> function_info);
604
605  // Support for type checks.
606  void GenerateIsSmi(ZoneList<Expression*>* args);
607  void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
608  void GenerateIsArray(ZoneList<Expression*>* args);
609  void GenerateIsRegExp(ZoneList<Expression*>* args);
610  void GenerateIsObject(ZoneList<Expression*>* args);
611  void GenerateIsSpecObject(ZoneList<Expression*>* args);
612  void GenerateIsFunction(ZoneList<Expression*>* args);
613  void GenerateIsUndetectableObject(ZoneList<Expression*>* args);
614  void GenerateIsStringWrapperSafeForDefaultValueOf(
615      ZoneList<Expression*>* args);
616
617  // Support for construct call checks.
618  void GenerateIsConstructCall(ZoneList<Expression*>* args);
619
620  // Support for arguments.length and arguments[?].
621  void GenerateArgumentsLength(ZoneList<Expression*>* args);
622  void GenerateArguments(ZoneList<Expression*>* args);
623
624  // Support for accessing the class and value fields of an object.
625  void GenerateClassOf(ZoneList<Expression*>* args);
626  void GenerateValueOf(ZoneList<Expression*>* args);
627  void GenerateSetValueOf(ZoneList<Expression*>* args);
628
629  // Fast support for charCodeAt(n).
630  void GenerateStringCharCodeAt(ZoneList<Expression*>* args);
631
632  // Fast support for string.charAt(n) and string[n].
633  void GenerateStringCharFromCode(ZoneList<Expression*>* args);
634
635  // Fast support for string.charAt(n) and string[n].
636  void GenerateStringCharAt(ZoneList<Expression*>* args);
637
638  // Fast support for object equality testing.
639  void GenerateObjectEquals(ZoneList<Expression*>* args);
640
641  void GenerateLog(ZoneList<Expression*>* args);
642
643  void GenerateGetFramePointer(ZoneList<Expression*>* args);
644
645  // Fast support for Math.random().
646  void GenerateRandomHeapNumber(ZoneList<Expression*>* args);
647
648  // Fast support for StringAdd.
649  void GenerateStringAdd(ZoneList<Expression*>* args);
650
651  // Fast support for SubString.
652  void GenerateSubString(ZoneList<Expression*>* args);
653
654  // Fast support for StringCompare.
655  void GenerateStringCompare(ZoneList<Expression*>* args);
656
657  // Support for direct calls from JavaScript to native RegExp code.
658  void GenerateRegExpExec(ZoneList<Expression*>* args);
659
660  void GenerateRegExpConstructResult(ZoneList<Expression*>* args);
661
662  void GenerateRegExpCloneResult(ZoneList<Expression*>* args);
663
664  // Support for fast native caches.
665  void GenerateGetFromCache(ZoneList<Expression*>* args);
666
667  // Fast support for number to string.
668  void GenerateNumberToString(ZoneList<Expression*>* args);
669
670  // Fast swapping of elements. Takes three expressions, the object and two
671  // indices. This should only be used if the indices are known to be
672  // non-negative and within bounds of the elements array at the call site.
673  void GenerateSwapElements(ZoneList<Expression*>* args);
674
675  // Fast call for custom callbacks.
676  void GenerateCallFunction(ZoneList<Expression*>* args);
677
678  // Fast call to math functions.
679  void GenerateMathPow(ZoneList<Expression*>* args);
680  void GenerateMathSin(ZoneList<Expression*>* args);
681  void GenerateMathCos(ZoneList<Expression*>* args);
682  void GenerateMathSqrt(ZoneList<Expression*>* args);
683
684  void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args);
685
686  // Simple condition analysis.
687  enum ConditionAnalysis {
688    ALWAYS_TRUE,
689    ALWAYS_FALSE,
690    DONT_KNOW
691  };
692  ConditionAnalysis AnalyzeCondition(Expression* cond);
693
694  // Methods used to indicate the source position of the code being
695  // generated.  Source positions are collected by the assembler and emitted
696  // with the relocation information.
697  void CodeForFunctionPosition(FunctionLiteral* fun);
698  void CodeForReturnPosition(FunctionLiteral* fun);
699  void CodeForStatementPosition(Statement* node);
700  void CodeForDoWhileConditionPosition(DoWhileStatement* stmt);
701  void CodeForSourcePosition(int pos);
702
703  void SetTypeForStackSlot(Slot* slot, TypeInfo info);
704
705#ifdef DEBUG
706  // True if the registers are valid for entry to a block.  There should
707  // be no frame-external references to (non-reserved) registers.
708  bool HasValidEntryRegisters();
709#endif
710
711  ZoneList<DeferredCode*> deferred_;
712
713  // Assembler
714  MacroAssembler* masm_;  // to generate code
715
716  CompilationInfo* info_;
717
718  // Code generation state
719  VirtualFrame* frame_;
720  RegisterAllocator* allocator_;
721  CodeGenState* state_;
722  int loop_nesting_;
723
724  // Jump targets.
725  // The target of the return from the function.
726  BreakTarget function_return_;
727
728  // True if the function return is shadowed (ie, jumping to the target
729  // function_return_ does not jump to the true function return, but rather
730  // to some unlinking code).
731  bool function_return_is_shadowed_;
732
733  // True when we are in code that expects the virtual frame to be fully
734  // spilled.  Some virtual frame functions are disabled in DEBUG builds when
735  // called from spilled code, because they do not leave the virtual frame
736  // in a spilled state.
737  bool in_spilled_code_;
738
739  static InlineRuntimeLUT kInlineRuntimeLUT[];
740
741  friend class VirtualFrame;
742  friend class JumpTarget;
743  friend class Reference;
744  friend class Result;
745  friend class FastCodeGenerator;
746  friend class FullCodeGenerator;
747  friend class FullCodeGenSyntaxChecker;
748
749  friend class CodeGeneratorPatcher;  // Used in test-log-stack-tracer.cc
750
751  DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
752};
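
// Illustrative sketch (not part of the original header) of how the static
// entry points above fit together; the authoritative flow is MakeCode in the
// .cc file:
//
//   MakeCodePrologue(info);                      // AST printing etc. per flags
//   // ...set up a MacroAssembler and a CodeGenerator on it, then...
//   cgen.Generate(info);                         // walk the AST, emit code
//   return MakeCodeEpilogue(masm, flags, info);  // allocate and install Code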
753
754
755// Compute a transcendental math function natively, or call the
756// TranscendentalCache runtime function.
757class TranscendentalCacheStub: public CodeStub {
758 public:
759  explicit TranscendentalCacheStub(TranscendentalCache::Type type)
760      : type_(type) {}
761  void Generate(MacroAssembler* masm);
762 private:
763  TranscendentalCache::Type type_;
764  Major MajorKey() { return TranscendentalCache; }
765  int MinorKey() { return type_; }
766  Runtime::FunctionId RuntimeFunction();
767  void GenerateOperation(MacroAssembler* masm, Label* on_nan_result);
768};
769
770
771class ToBooleanStub: public CodeStub {
772 public:
773  ToBooleanStub() { }
774
775  void Generate(MacroAssembler* masm);
776
777 private:
778  Major MajorKey() { return ToBoolean; }
779  int MinorKey() { return 0; }
780};
781
782
783// Flag that indicates how to generate code for the stub GenericBinaryOpStub.
784enum GenericBinaryFlags {
785  NO_GENERIC_BINARY_FLAGS = 0,
786  NO_SMI_CODE_IN_STUB = 1 << 0  // Omit smi code in stub.
787};
788
789
790class GenericBinaryOpStub: public CodeStub {
791 public:
792  GenericBinaryOpStub(Token::Value op,
793                      OverwriteMode mode,
794                      GenericBinaryFlags flags,
795                      TypeInfo operands_type = TypeInfo::Unknown())
796      : op_(op),
797        mode_(mode),
798        flags_(flags),
799        args_in_registers_(false),
800        args_reversed_(false),
801        static_operands_type_(operands_type),
802        runtime_operands_type_(BinaryOpIC::DEFAULT),
803        name_(NULL) {
804    ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
805  }
806
807  GenericBinaryOpStub(int key, BinaryOpIC::TypeInfo type_info)
808      : op_(OpBits::decode(key)),
809        mode_(ModeBits::decode(key)),
810        flags_(FlagBits::decode(key)),
811        args_in_registers_(ArgsInRegistersBits::decode(key)),
812        args_reversed_(ArgsReversedBits::decode(key)),
813        static_operands_type_(TypeInfo::ExpandedRepresentation(
814            StaticTypeInfoBits::decode(key))),
815        runtime_operands_type_(type_info),
816        name_(NULL) {
817  }
818
819  // Generate code to call the stub with the supplied arguments. This will add
820  // code at the call site to prepare arguments either in registers or on the
821  // stack together with the actual call.
822  void GenerateCall(MacroAssembler* masm, Register left, Register right);
823  void GenerateCall(MacroAssembler* masm, Register left, Smi* right);
824  void GenerateCall(MacroAssembler* masm, Smi* left, Register right);
825
826  Result GenerateCall(MacroAssembler* masm,
827                      VirtualFrame* frame,
828                      Result* left,
829                      Result* right);
830
831 private:
832  Token::Value op_;
833  OverwriteMode mode_;
834  GenericBinaryFlags flags_;
835  bool args_in_registers_;  // Arguments passed in registers, not on the stack.
836  bool args_reversed_;  // Left and right arguments are swapped.
837
838  // Number type information of operands, determined by code generator.
839  TypeInfo static_operands_type_;
840
841  // Operand type information determined at runtime.
842  BinaryOpIC::TypeInfo runtime_operands_type_;
843
844  char* name_;
845
846  const char* GetName();
847
848#ifdef DEBUG
849  void Print() {
850    PrintF("GenericBinaryOpStub %d (op %s), "
851           "(mode %d, flags %d, registers %d, reversed %d, only_numbers %s)\n",
852           MinorKey(),
853           Token::String(op_),
854           static_cast<int>(mode_),
855           static_cast<int>(flags_),
856           static_cast<int>(args_in_registers_),
857           static_cast<int>(args_reversed_),
858           static_operands_type_.ToString());
859  }
860#endif
861
862  // Minor key encoding in 17 bits TTNNNFRAOOOOOOOMM.
863  class ModeBits: public BitField<OverwriteMode, 0, 2> {};
864  class OpBits: public BitField<Token::Value, 2, 7> {};
865  class ArgsInRegistersBits: public BitField<bool, 9, 1> {};
866  class ArgsReversedBits: public BitField<bool, 10, 1> {};
867  class FlagBits: public BitField<GenericBinaryFlags, 11, 1> {};
868  class StaticTypeInfoBits: public BitField<int, 12, 3> {};
869  class RuntimeTypeInfoBits: public BitField<BinaryOpIC::TypeInfo, 15, 2> {};
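  // Field widths: 2 (mode) + 7 (op) + 1 (in registers) + 1 (reversed)
  // + 1 (flags) + 3 (static type) + 2 (runtime type) = 17 bits in total.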
870
871  Major MajorKey() { return GenericBinaryOp; }
872  int MinorKey() {
873    // Encode the parameters in a unique 17 bit value.
874    return OpBits::encode(op_)
875           | ModeBits::encode(mode_)
876           | FlagBits::encode(flags_)
877           | ArgsInRegistersBits::encode(args_in_registers_)
878           | ArgsReversedBits::encode(args_reversed_)
879           | StaticTypeInfoBits::encode(
880               static_operands_type_.ThreeBitRepresentation())
881           | RuntimeTypeInfoBits::encode(runtime_operands_type_);
882  }
883
884  void Generate(MacroAssembler* masm);
885  void GenerateSmiCode(MacroAssembler* masm, Label* slow);
886  void GenerateLoadArguments(MacroAssembler* masm);
887  void GenerateReturn(MacroAssembler* masm);
888  void GenerateRegisterArgsPush(MacroAssembler* masm);
889  void GenerateTypeTransition(MacroAssembler* masm);
890
891  bool ArgsInRegistersSupported() {
892    return (op_ == Token::ADD) || (op_ == Token::SUB)
893        || (op_ == Token::MUL) || (op_ == Token::DIV);
894  }
895  bool IsOperationCommutative() {
896    return (op_ == Token::ADD) || (op_ == Token::MUL);
897  }
898
899  void SetArgsInRegisters() { args_in_registers_ = true; }
900  void SetArgsReversed() { args_reversed_ = true; }
901  bool HasSmiCodeInStub() { return (flags_ & NO_SMI_CODE_IN_STUB) == 0; }
902  bool HasArgsInRegisters() { return args_in_registers_; }
903  bool HasArgsReversed() { return args_reversed_; }
904
905  bool ShouldGenerateSmiCode() {
906    return HasSmiCodeInStub() &&
907        runtime_operands_type_ != BinaryOpIC::HEAP_NUMBERS &&
908        runtime_operands_type_ != BinaryOpIC::STRINGS;
909  }
910
911  bool ShouldGenerateFPCode() {
912    return runtime_operands_type_ != BinaryOpIC::STRINGS;
913  }
914
915  virtual int GetCodeKind() { return Code::BINARY_OP_IC; }
916
917  virtual InlineCacheState GetICState() {
918    return BinaryOpIC::ToState(runtime_operands_type_);
919  }
920};
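
// Illustrative usage only (not part of the original header): the code
// generator typically instantiates the stub with the operator and the type
// feedback it has, then calls it through the virtual frame; the local names
// below are made up.
//
//   GenericBinaryOpStub stub(Token::ADD, OVERWRITE_LEFT,
//                            NO_GENERIC_BINARY_FLAGS, operands_type);
//   Result answer = stub.GenerateCall(masm(), frame(), &left, &right);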
921
922class StringHelper : public AllStatic {
923 public:
924  // Generate code for copying characters using a simple loop. This should only
925  // be used in places where the number of characters is small and the
926  // additional setup and checking in GenerateCopyCharactersREP adds too much
927  // overhead. Copying of overlapping regions is not supported.
928  static void GenerateCopyCharacters(MacroAssembler* masm,
929                                     Register dest,
930                                     Register src,
931                                     Register count,
932                                     bool ascii);
933
934  // Generate code for copying characters using the rep movs instruction.
935  // Copies rcx characters from rsi to rdi. Copying of overlapping regions is
936  // not supported.
937  static void GenerateCopyCharactersREP(MacroAssembler* masm,
938                                        Register dest,     // Must be rdi.
939                                        Register src,      // Must be rsi.
940                                        Register count,    // Must be rcx.
941                                        bool ascii);
942
943
944  // Probe the symbol table for a two character string. If the string is
945  // not found by probing, a jump to the label not_found is performed. This jump
946  // does not guarantee that the string is not in the symbol table. If the
947  // string is found, the code falls through with the string in register rax.
948  static void GenerateTwoCharacterSymbolTableProbe(MacroAssembler* masm,
949                                                   Register c1,
950                                                   Register c2,
951                                                   Register scratch1,
952                                                   Register scratch2,
953                                                   Register scratch3,
954                                                   Register scratch4,
955                                                   Label* not_found);
956
957  // Generate string hash.
958  static void GenerateHashInit(MacroAssembler* masm,
959                               Register hash,
960                               Register character,
961                               Register scratch);
962  static void GenerateHashAddCharacter(MacroAssembler* masm,
963                                       Register hash,
964                                       Register character,
965                                       Register scratch);
966  static void GenerateHashGetHash(MacroAssembler* masm,
967                                  Register hash,
968                                  Register scratch);
969
970 private:
971  DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
972};
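
// Illustrative calling sequence only (not part of the original header): a
// caller hashing a string one character at a time would emit, in order,
//
//   StringHelper::GenerateHashInit(masm, hash, first_char, scratch);
//   StringHelper::GenerateHashAddCharacter(masm, hash, next_char, scratch);
//   // ...one GenerateHashAddCharacter per remaining character...
//   StringHelper::GenerateHashGetHash(masm, hash, scratch);  // finalize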
973
974
975// Flag that indicates how to generate code for the stub StringAddStub.
976enum StringAddFlags {
977  NO_STRING_ADD_FLAGS = 0,
978  NO_STRING_CHECK_IN_STUB = 1 << 0  // Omit string check in stub.
979};
980
981
982class StringAddStub: public CodeStub {
983 public:
984  explicit StringAddStub(StringAddFlags flags) {
985    string_check_ = ((flags & NO_STRING_CHECK_IN_STUB) == 0);
986  }
987
988 private:
989  Major MajorKey() { return StringAdd; }
990  int MinorKey() { return string_check_ ? 0 : 1; }
991
992  void Generate(MacroAssembler* masm);
993
994  // Should the stub check whether arguments are strings?
995  bool string_check_;
996};
997
998
999class SubStringStub: public CodeStub {
1000 public:
1001  SubStringStub() {}
1002
1003 private:
1004  Major MajorKey() { return SubString; }
1005  int MinorKey() { return 0; }
1006
1007  void Generate(MacroAssembler* masm);
1008};
1009
1010
1011class StringCompareStub: public CodeStub {
1012 public:
1013  explicit StringCompareStub() {}
1014
1015  // Compares two flat ASCII strings and returns the result in rax after
1016  // popping the two arguments from the stack.
1017  static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
1018                                              Register left,
1019                                              Register right,
1020                                              Register scratch1,
1021                                              Register scratch2,
1022                                              Register scratch3,
1023                                              Register scratch4);
1024
1025 private:
1026  Major MajorKey() { return StringCompare; }
1027  int MinorKey() { return 0; }
1028
1029  void Generate(MacroAssembler* masm);
1030};
1031
1032
1033class NumberToStringStub: public CodeStub {
1034 public:
1035  NumberToStringStub() { }
1036
1037  // Generate code to do a lookup in the number string cache. If the number in
1038  // the register object is found in the cache the generated code falls through
1039  // with the result in the result register. The object and the result register
1040  // can be the same. If the number is not found in the cache the code jumps to
1041  // the label not_found with only the content of register object unchanged.
1042  static void GenerateLookupNumberStringCache(MacroAssembler* masm,
1043                                              Register object,
1044                                              Register result,
1045                                              Register scratch1,
1046                                              Register scratch2,
1047                                              bool object_is_smi,
1048                                              Label* not_found);
1049
1050 private:
1051  static void GenerateConvertHashCodeToIndex(MacroAssembler* masm,
1052                                             Register hash,
1053                                             Register mask);
1054
1055  Major MajorKey() { return NumberToString; }
1056  int MinorKey() { return 0; }
1057
1058  void Generate(MacroAssembler* masm);
1059
1060  const char* GetName() { return "NumberToStringStub"; }
1061
1062#ifdef DEBUG
1063  void Print() {
1064    PrintF("NumberToStringStub\n");
1065  }
1066#endif
1067};
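
// Illustrative usage only (not part of the original header): a fast path can
// probe the cache and fall back to the stub or a runtime call on a miss; the
// register choices below are made up.
//
//   Label slow;
//   NumberToStringStub::GenerateLookupNumberStringCache(
//       masm, rbx, rax, r8, r9, false, &slow);
//   // Cache hit: the result is in rax, code falls through.
//   __ bind(&slow);
//   // Cache miss: rbx is unchanged; call the stub or the runtime.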
1068
1069
1070class RecordWriteStub : public CodeStub {
1071 public:
1072  RecordWriteStub(Register object, Register addr, Register scratch)
1073      : object_(object), addr_(addr), scratch_(scratch) { }
1074
1075  void Generate(MacroAssembler* masm);
1076
1077 private:
1078  Register object_;
1079  Register addr_;
1080  Register scratch_;
1081
1082#ifdef DEBUG
1083  void Print() {
1084    PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
1085           object_.code(), addr_.code(), scratch_.code());
1086  }
1087#endif
1088
1089  // Minor key encoding in 12 bits. 4 bits for each of the three
1090  // registers (object, address and scratch) OOOOAAAASSSS.
1091  class ScratchBits : public BitField<uint32_t, 0, 4> {};
1092  class AddressBits : public BitField<uint32_t, 4, 4> {};
1093  class ObjectBits : public BitField<uint32_t, 8, 4> {};
1094
1095  Major MajorKey() { return RecordWrite; }
1096
1097  int MinorKey() {
1098    // Encode the registers.
1099    return ObjectBits::encode(object_.code()) |
1100           AddressBits::encode(addr_.code()) |
1101           ScratchBits::encode(scratch_.code());
1102  }
1103};
1104
1105
1106} }  // namespace v8::internal
1107
1108#endif  // V8_X64_CODEGEN_X64_H_
1109