// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_IA32_LITHIUM_CODEGEN_IA32_H_
#define V8_IA32_LITHIUM_CODEGEN_IA32_H_

#include "src/ia32/lithium-ia32.h"

#include "src/base/logging.h"
#include "src/deoptimizer.h"
#include "src/ia32/lithium-gap-resolver-ia32.h"
#include "src/lithium-codegen.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class LGapNode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        dynamic_frame_alignment_(false),
        support_aligned_spilled_doubles_(false),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Operand ToOperand(LOperand* op) const;
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Immediate ToImmediate(LOperand* op, const Representation& r) const {
    return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
  }
  double ToDouble(LConstantOperand* op) const;

  Handle<Object> ToHandle(LConstantOperand* op) const;

  // The operand denoting the second word (the one with a higher address) of
  // a double stack slot.
  Operand HighOperand(LOperand* op);
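  // For illustration: a double stack slot occupies two consecutive words on
  // ia32, so if ToOperand(op) addresses the low word at, say, [ebp - 16]
  // (hypothetical offset), HighOperand(op) addresses [ebp - 12], i.e. the
  // low-word address plus kPointerSize.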

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle.  Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void EnsureRelocSpaceForDeoptimization();

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
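  // For example, assuming AddI appears in LITHIUM_CONCRETE_INSTRUCTION_LIST,
  // the macro above declares:
  //   void DoAddI(LAddI* node);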

 private:
  StrictMode strict_mode() const { return info()->strict_mode(); }

  Scope* scope() const { return scope_; }

  XMMRegister double_scratch0() const { return xmm0; }

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Those that return a bool return true if code
  // generation should continue.
  void GenerateBodyInstructionPre(LInstruction* instr) OVERRIDE;
  void GenerateBodyInstructionPost(LInstruction* instr) OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* fun,
                   int argc,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int argc,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, argc, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

  enum EDIState {
    EDI_UNINITIALIZED,
    EDI_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in edi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         EDIState edi_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition cc, LInstruction* instr, const char* detail,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LInstruction* instr, const char* detail);

  bool DeoptEveryNTimes() {
    return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
  }

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;

  Operand BuildFastArrayOperand(LOperand* elements_pointer,
                                LOperand* key,
                                Representation key_representation,
                                ElementsKind elements_kind,
                                uint32_t base_offset);

  Operand BuildSeqStringOperand(Register string,
                                LOperand* index,
                                String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input, Register temp,
                        XMMRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);
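  // A caller typically feeds the returned condition straight into
  // EmitBranch; a sketch of the idiom (names as declared above):
  //   Condition final_branch_condition = EmitTypeofIs(instr, input);
  //   EmitBranch(instr, final_branch_condition);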

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);

  void EmitReturn(LReturn* instr, bool dynamic_frame_alignment);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  friend class LGapResolver;

#ifdef _MSC_VER
  // On Windows, you may not access the stack more than one page below
  // the most recently mapped page.  To make the allocated area randomly
  // accessible, we write an arbitrary value to each page in the range
  // esp + offset - page_size .. esp in turn.
  void MakeSureStackPagesMapped(int offset);
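  // A minimal sketch of the probing loop, assuming a 4KB page size (the
  // real definition lives in lithium-codegen-ia32.cc):
  //   const int kPageSize = 4 * KB;
  //   for (int probe = offset - kPageSize; probe > 0; probe -= kPageSize) {
  //     masm()->mov(Operand(esp, probe), eax);  // Touch one page.
  //   }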
#endif

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  bool dynamic_frame_alignment_;
  bool support_aligned_spilled_doubles_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope FINAL BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      DCHECK(codegen_->info()->is_calling());
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
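
  // Typical usage (a sketch of the deferred-code pattern; the runtime
  // function and pointer map shown are illustrative):
  //   {
  //     PushSafepointRegistersScope scope(this);
  //     __ CallRuntimeSaveDoubles(Runtime::kAllocateHeapNumber);
  //     RecordSafepointWithRegisters(
  //         instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);
  //   }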

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  Label done_;
  int instruction_index_;
};
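
// A concrete deferred-code stub is declared as a local subclass whose
// Generate() emits the out-of-line path; a sketch of the idiom (DeferredFoo,
// LFoo and DoDeferredFoo are placeholders):
//   class DeferredFoo FINAL : public LDeferredCode {
//    public:
//     DeferredFoo(LCodeGen* codegen, LFoo* instr)
//         : LDeferredCode(codegen), instr_(instr) {}
//     void Generate() OVERRIDE { codegen()->DoDeferredFoo(instr_); }
//     LInstruction* instr() OVERRIDE { return instr_; }
//
//    private:
//     LFoo* instr_;
//   };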

} }  // namespace v8::internal

#endif  // V8_IA32_LITHIUM_CODEGEN_IA32_H_