// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
#define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_

#include "mips/lithium-mips.h"
#include "mips/lithium-gap-resolver-mips.h"
#include "deoptimizer.h"
#include "safepoint-table.h"
#include "scopes.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen BASE_EMBEDDED {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : chunk_(chunk),
        masm_(assembler),
        info_(info),
        current_block_(-1),
        current_instruction_(-1),
        instructions_(chunk->instructions()),
        deoptimizations_(4),
        deopt_jump_table_(4),
        deoptimization_literals_(8),
        inlined_function_count_(0),
        scope_(info->scope()),
        status_(UNUSED),
        deferred_(8),
        osr_pc_offset_(-1),
        last_lazy_deopt_pc_(0),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }


  // Simple accessors.
  MacroAssembler* masm() const { return masm_; }
  CompilationInfo* info() const { return info_; }
  Isolate* isolate() const { return info_->isolate(); }
  Factory* factory() const { return isolate()->factory(); }
  Heap* heap() const { return isolate()->heap(); }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DoubleRegister EmitLoadDoubleRegister(LOperand* op,
                                        FloatRegister flt_scratch,
                                        DoubleRegister dbl_scratch);
  int ToInteger32(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  void DoDeferredNumberTagD(LNumberTagD* instr);
  void DoDeferredNumberTagI(LNumberTagI* instr);
  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LUnaryMathOperation* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredRandom(LRandom* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocateObject(LAllocateObject* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);

  void DoCheckMapCommon(Register reg, Register scratch, Handle<Map> map,
                        CompareMapMode mode, LEnvironment* env);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  enum Status {
    UNUSED,
    GENERATING,
    DONE,
    ABORTED
  };

  bool is_unused() const { return status_ == UNUSED; }
  bool is_generating() const { return status_ == GENERATING; }
  bool is_done() const { return status_ == DONE; }
  bool is_aborted() const { return status_ == ABORTED; }

  StrictModeFlag strict_mode_flag() const {
    return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
  }

  LChunk* chunk() const { return chunk_; }
  Scope* scope() const { return scope_; }
  HGraph* graph() const { return chunk_->graph(); }

  Register scratch0() { return kLithiumScratchReg; }
  Register scratch1() { return kLithiumScratchReg2; }
  DoubleRegister double_scratch0() { return kLithiumScratchDouble; }

  int GetNextEmittedBlock(int block);
  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
  int GetParameterCount() const { return scope()->num_parameters(); }

  void Abort(const char* format, ...);
  void Comment(const char* format, ...);

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code); }

  // Code generation passes.  Returns true if code generation should
  // continue.
  bool GeneratePrologue();
  bool GenerateBody();
  bool GenerateDeferredCode();
  bool GenerateDeoptJumpTable();
  bool GenerateSafepointTable();

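  // How a safepoint should be recorded for a call: as a plain safepoint, or
  // as one that also saves the registers (with no arguments on the stack).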
  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr);

  // Generate a direct call to a known function.  Expects the function
  // to be in a1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int arity,
                         LInstruction* instr,
                         CallKind call_kind);

  void LoadHeapObject(Register result, Handle<HeapObject> object);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition cc,
                    LEnvironment* environment,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));

  void AddToTranslation(Translation* translation,
                        LOperand* op,
                        bool is_tagged);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  DoubleRegister ToDoubleRegister(int index) const;

  // Specific math operations - used from DoUnaryMathOperation.
  void EmitIntegerMathAbs(LUnaryMathOperation* instr);
  void DoMathAbs(LUnaryMathOperation* instr);
  void DoMathFloor(LUnaryMathOperation* instr);
  void DoMathRound(LUnaryMathOperation* instr);
  void DoMathSqrt(LUnaryMathOperation* instr);
  void DoMathPowHalf(LUnaryMathOperation* instr);
  void DoMathLog(LUnaryMathOperation* instr);
  void DoMathTan(LUnaryMathOperation* instr);
  void DoMathCos(LUnaryMathOperation* instr);
  void DoMathSin(LUnaryMathOperation* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
                                              int arguments,
                                              Safepoint::DeoptMode mode);
  void RecordPosition(int position);

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);
  void EmitBranch(int left_block,
                  int right_block,
                  Condition cc,
                  Register src1,
                  const Operand& src2);
  void EmitBranchF(int left_block,
                   int right_block,
                   Condition cc,
                   FPURegister src1,
                   FPURegister src2);
  void EmitCmpI(LOperand* left, LOperand* right);
  void EmitNumberUntagD(Register input,
                        DoubleRegister result,
                        bool deoptimize_on_undefined,
                        bool deoptimize_on_minus_zero,
                        LEnvironment* env);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  // Returns two registers in cmp1 and cmp2 that can be used in the
  // Branch instruction after EmitTypeofIs.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name,
                         Register& cmp1,
                         Operand& cmp2);

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Register temp2,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp1, Register temp2);

  void EmitLoadFieldOrConstantFunction(Register result,
                                       Register object,
                                       Handle<Map> type,
                                       Handle<String> name);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset);

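  // An entry in the deoptimization jump table: the label bound at the jump
  // site in the generated code and the deoptimization entry address it
  // targets.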
  struct JumpTableEntry {
    explicit inline JumpTableEntry(Address entry)
        : label(),
          address(entry) { }
    Label label;
    Address address;
  };

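  // Pad the generated code, if necessary, so that a lazy deoptimization call
  // can be patched in after the last recorded lazy-deopt pc.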
  void EnsureSpaceForLazyDeopt();

  LChunk* const chunk_;
  MacroAssembler* const masm_;
  CompilationInfo* const info_;

  int current_block_;
  int current_instruction_;
  const ZoneList<LInstruction*>* instructions_;
  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<JumpTableEntry> deopt_jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  Status status_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  int last_lazy_deopt_pc_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

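  // Scope that pushes the safepoint registers (and, for
  // kWithRegistersAndDoubles, the double registers) on entry and pops them
  // again on exit, keeping expected_safepoint_kind_ in sync.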
  class PushSafepointRegistersScope BASE_EMBEDDED {
   public:
    PushSafepointRegistersScope(LCodeGen* codegen,
                                Safepoint::Kind kind)
        : codegen_(codegen) {
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = kind;

      switch (codegen_->expected_safepoint_kind_) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PushSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PushSafepointRegistersAndDoubles();
          break;
        default:
          UNREACHABLE();
      }
    }

    ~PushSafepointRegistersScope() {
      Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
      ASSERT((kind & Safepoint::kWithRegisters) != 0);
      switch (kind) {
        case Safepoint::kWithRegisters:
          codegen_->masm_->PopSafepointRegisters();
          break;
        case Safepoint::kWithRegistersAndDoubles:
          codegen_->masm_->PopSafepointRegistersAndDoubles();
          break;
        default:
          UNREACHABLE();
      }
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


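// Deferred code is emitted out of line, after the main instruction sequence.
// Subclasses implement Generate(); generated code jumps to entry() and
// branches back through exit() when done.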
class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() { }
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};

} }  // namespace v8::internal

#endif  // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_