/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_OPTIMIZING_CODE_GENERATOR_MIPS64_H_
#define ART_COMPILER_OPTIMIZING_CODE_GENERATOR_MIPS64_H_

#include "code_generator.h"
#include "dex/compiler_enums.h"
#include "driver/compiler_options.h"
#include "nodes.h"
#include "parallel_move_resolver.h"
#include "utils/mips64/assembler_mips64.h"

namespace art {
namespace mips64 {

// InvokeDexCallingConvention registers

static constexpr GpuRegister kParameterCoreRegisters[] =
    { A1, A2, A3, A4, A5, A6, A7 };
static constexpr size_t kParameterCoreRegistersLength = arraysize(kParameterCoreRegisters);

static constexpr FpuRegister kParameterFpuRegisters[] =
    { F13, F14, F15, F16, F17, F18, F19 };
static constexpr size_t kParameterFpuRegistersLength = arraysize(kParameterFpuRegisters);


// InvokeRuntimeCallingConvention registers

static constexpr GpuRegister kRuntimeParameterCoreRegisters[] =
    { A0, A1, A2, A3, A4, A5, A6, A7 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);

static constexpr FpuRegister kRuntimeParameterFpuRegisters[] =
    { F12, F13, F14, F15, F16, F17, F18, F19 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);


static constexpr GpuRegister kCoreCalleeSaves[] =
    { S0, S1, S2, S3, S4, S5, S6, S7, GP, S8, RA };  // TODO: review
static constexpr FpuRegister kFpuCalleeSaves[] =
    { F24, F25, F26, F27, F28, F29, F30, F31 };


class CodeGeneratorMIPS64;

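// Calling convention used for calls between compiled managed (dex) methods. A0 carries
// the ArtMethod*, so core arguments start at A1 and floating-point arguments at F13.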
class InvokeDexCallingConvention : public CallingConvention<GpuRegister, FpuRegister> {
 public:
  InvokeDexCallingConvention()
      : CallingConvention(kParameterCoreRegisters,
                          kParameterCoreRegistersLength,
                          kParameterFpuRegisters,
                          kParameterFpuRegistersLength,
                          kMips64PointerSize) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConvention);
};

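// Assigns a register or stack location to each argument of a managed call, one argument
// at a time, following the convention above.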
class InvokeDexCallingConventionVisitorMIPS64 : public InvokeDexCallingConventionVisitor {
 public:
  InvokeDexCallingConventionVisitorMIPS64() {}
  virtual ~InvokeDexCallingConventionVisitorMIPS64() {}

  Location GetNextLocation(Primitive::Type type) OVERRIDE;
  Location GetReturnLocation(Primitive::Type type) const OVERRIDE;
  Location GetMethodLocation() const OVERRIDE;

 private:
  InvokeDexCallingConvention calling_convention;

  DISALLOW_COPY_AND_ASSIGN(InvokeDexCallingConventionVisitorMIPS64);
};

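// Calling convention used when calling into the runtime (quick entrypoints). Unlike the
// managed convention, A0 and F12 are available for arguments.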
class InvokeRuntimeCallingConvention : public CallingConvention<GpuRegister, FpuRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength,
                          kMips64PointerSize) {}

  Location GetReturnLocation(Primitive::Type return_type);

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

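// Register assignments used by the slow paths for unresolved field accesses.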
class FieldAccessCallingConventionMIPS64 : public FieldAccessCallingConvention {
 public:
  FieldAccessCallingConventionMIPS64() {}

  Location GetObjectLocation() const OVERRIDE {
    return Location::RegisterLocation(A1);
  }
  Location GetFieldIndexLocation() const OVERRIDE {
    return Location::RegisterLocation(A0);
  }
  Location GetReturnLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
    return Location::RegisterLocation(V0);
  }
  Location GetSetValueLocation(Primitive::Type type, bool is_instance) const OVERRIDE {
    return Primitive::Is64BitType(type)
        ? Location::RegisterLocation(A2)
        : (is_instance
            ? Location::RegisterLocation(A2)
            : Location::RegisterLocation(A1));
  }
  Location GetFpuLocation(Primitive::Type type ATTRIBUTE_UNUSED) const OVERRIDE {
    return Location::FpuRegisterLocation(F0);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(FieldAccessCallingConventionMIPS64);
};

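// Resolves parallel moves (the register/stack shuffles emitted between instructions),
// breaking move cycles with swaps.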
class ParallelMoveResolverMIPS64 : public ParallelMoveResolverWithSwap {
 public:
  ParallelMoveResolverMIPS64(ArenaAllocator* allocator, CodeGeneratorMIPS64* codegen)
      : ParallelMoveResolverWithSwap(allocator), codegen_(codegen) {}

  void EmitMove(size_t index) OVERRIDE;
  void EmitSwap(size_t index) OVERRIDE;
  void SpillScratch(int reg) OVERRIDE;
  void RestoreScratch(int reg) OVERRIDE;

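  // Swaps the contents of two stack slots; `double_slot` selects 64-bit slots.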
  void Exchange(int index1, int index2, bool double_slot);

  Mips64Assembler* GetAssembler() const;

 private:
  CodeGeneratorMIPS64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(ParallelMoveResolverMIPS64);
};

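// Base class for MIPS64 slow paths; provides the entry and exit labels of the
// out-of-line code.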
class SlowPathCodeMIPS64 : public SlowPathCode {
 public:
  explicit SlowPathCodeMIPS64(HInstruction* instruction)
      : SlowPathCode(instruction), entry_label_(), exit_label_() {}

  Mips64Label* GetEntryLabel() { return &entry_label_; }
  Mips64Label* GetExitLabel() { return &exit_label_; }

 private:
  Mips64Label entry_label_;
  Mips64Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeMIPS64);
};

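// First code generation pass: computes a LocationSummary (register and stack
// constraints) for every HInstruction in the graph.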
class LocationsBuilderMIPS64 : public HGraphVisitor {
 public:
  LocationsBuilderMIPS64(HGraph* graph, CodeGeneratorMIPS64* codegen)
      : HGraphVisitor(graph), codegen_(codegen) {}

#define DECLARE_VISIT_INSTRUCTION(name, super)     \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

 private:
  void HandleInvoke(HInvoke* invoke);
  void HandleBinaryOp(HBinaryOperation* operation);
  void HandleCondition(HCondition* instruction);
  void HandleShift(HBinaryOperation* operation);
  void HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);

  InvokeDexCallingConventionVisitorMIPS64 parameter_visitor_;

  CodeGeneratorMIPS64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(LocationsBuilderMIPS64);
};

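// Second code generation pass: emits MIPS64 code for every HInstruction, using the
// locations computed by LocationsBuilderMIPS64.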
class InstructionCodeGeneratorMIPS64 : public InstructionCodeGenerator {
 public:
  InstructionCodeGeneratorMIPS64(HGraph* graph, CodeGeneratorMIPS64* codegen);

#define DECLARE_VISIT_INSTRUCTION(name, super)     \
  void Visit##name(H##name* instr) OVERRIDE;

  FOR_EACH_CONCRETE_INSTRUCTION_COMMON(DECLARE_VISIT_INSTRUCTION)
  FOR_EACH_CONCRETE_INSTRUCTION_MIPS64(DECLARE_VISIT_INSTRUCTION)

#undef DECLARE_VISIT_INSTRUCTION

  void VisitInstruction(HInstruction* instruction) OVERRIDE {
    LOG(FATAL) << "Unreachable instruction " << instruction->DebugName()
               << " (id " << instruction->GetId() << ")";
  }

  Mips64Assembler* GetAssembler() const { return assembler_; }

 private:
  void GenerateClassInitializationCheck(SlowPathCodeMIPS64* slow_path, GpuRegister class_reg);
  void GenerateMemoryBarrier(MemBarrierKind kind);
  void GenerateSuspendCheck(HSuspendCheck* check, HBasicBlock* successor);
  void HandleBinaryOp(HBinaryOperation* operation);
  void HandleCondition(HCondition* instruction);
  void HandleShift(HBinaryOperation* operation);
  void HandleFieldSet(HInstruction* instruction,
                      const FieldInfo& field_info,
                      bool value_can_be_null);
  void HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info);
  void GenerateTestAndBranch(HInstruction* instruction,
                             size_t condition_input_index,
                             Mips64Label* true_target,
                             Mips64Label* false_target);
  void DivRemOneOrMinusOne(HBinaryOperation* instruction);
  void DivRemByPowerOfTwo(HBinaryOperation* instruction);
  void GenerateDivRemWithAnyConstant(HBinaryOperation* instruction);
  void GenerateDivRemIntegral(HBinaryOperation* instruction);
  void GenerateIntLongCompare(IfCondition cond, bool is64bit, LocationSummary* locations);
  void GenerateIntLongCompareAndBranch(IfCondition cond,
                                       bool is64bit,
                                       LocationSummary* locations,
                                       Mips64Label* label);
  void GenerateFpCompareAndBranch(IfCondition cond,
                                  bool gt_bias,
                                  Primitive::Type type,
                                  LocationSummary* locations,
                                  Mips64Label* label);
  void HandleGoto(HInstruction* got, HBasicBlock* successor);

  Mips64Assembler* const assembler_;
  CodeGeneratorMIPS64* const codegen_;

  DISALLOW_COPY_AND_ASSIGN(InstructionCodeGeneratorMIPS64);
};

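// MIPS64 code generator: owns the assembler, the two visitor passes above and the
// parallel move resolver, and drives frame entry/exit and slow-path generation.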
class CodeGeneratorMIPS64 : public CodeGenerator {
 public:
  CodeGeneratorMIPS64(HGraph* graph,
                      const Mips64InstructionSetFeatures& isa_features,
                      const CompilerOptions& compiler_options,
                      OptimizingCompilerStats* stats = nullptr);
  virtual ~CodeGeneratorMIPS64() {}

  void GenerateFrameEntry() OVERRIDE;
  void GenerateFrameExit() OVERRIDE;

  void Bind(HBasicBlock* block) OVERRIDE;

  size_t GetWordSize() const OVERRIDE { return kMips64DoublewordSize; }

  size_t GetFloatingPointSpillSlotSize() const OVERRIDE { return kMips64DoublewordSize; }

  uintptr_t GetAddressOf(HBasicBlock* block) OVERRIDE {
    return assembler_.GetLabelLocation(GetLabelOf(block));
  }

  HGraphVisitor* GetLocationBuilder() OVERRIDE { return &location_builder_; }
  HGraphVisitor* GetInstructionVisitor() OVERRIDE { return &instruction_visitor_; }
  Mips64Assembler* GetAssembler() OVERRIDE { return &assembler_; }
  const Mips64Assembler& GetAssembler() const OVERRIDE { return assembler_; }

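  // Marks the GC card for `object` when a reference `value` is stored into it;
  // `value_can_be_null` adds a null check that skips the marking for null stores.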
  void MarkGCCard(GpuRegister object, GpuRegister value, bool value_can_be_null);

  // Register allocation.

  void SetupBlockedRegisters() const OVERRIDE;

  size_t SaveCoreRegister(size_t stack_index, uint32_t reg_id);
  size_t RestoreCoreRegister(size_t stack_index, uint32_t reg_id);
  size_t SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id);
  size_t RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id);

  void DumpCoreRegister(std::ostream& stream, int reg) const OVERRIDE;
  void DumpFloatingPointRegister(std::ostream& stream, int reg) const OVERRIDE;

  InstructionSet GetInstructionSet() const OVERRIDE { return InstructionSet::kMips64; }

  const Mips64InstructionSetFeatures& GetInstructionSetFeatures() const {
    return isa_features_;
  }

  Mips64Label* GetLabelOf(HBasicBlock* block) const {
    return CommonGetLabelOf<Mips64Label>(block_labels_, block);
  }

  void Initialize() OVERRIDE {
    block_labels_ = CommonInitializeLabels<Mips64Label>();
  }

  void Finalize(CodeAllocator* allocator) OVERRIDE;

  // Code generation helpers.
  void MoveLocation(Location dst, Location src, Primitive::Type dst_type) OVERRIDE;

  void MoveConstant(Location destination, int32_t value) OVERRIDE;

  void AddLocationAsTemp(Location location, LocationSummary* locations) OVERRIDE;

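  // Swaps the contents of two locations of the given type; used by the parallel move
  // resolver.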
  void SwapLocations(Location loc1, Location loc2, Primitive::Type type);

  // Generate code to invoke a runtime entry point.
  void InvokeRuntime(QuickEntrypointEnum entrypoint,
                     HInstruction* instruction,
                     uint32_t dex_pc,
                     SlowPathCode* slow_path) OVERRIDE;

  void InvokeRuntime(int32_t offset,
                     HInstruction* instruction,
                     uint32_t dex_pc,
                     SlowPathCode* slow_path);

  ParallelMoveResolver* GetMoveResolver() OVERRIDE { return &move_resolver_; }

  bool NeedsTwoRegisters(Primitive::Type type ATTRIBUTE_UNUSED) const { return false; }

  // Check if the desired_string_load_kind is supported. If it is, return it,
  // otherwise return a fall-back kind that should be used instead.
  HLoadString::LoadKind GetSupportedLoadStringKind(
      HLoadString::LoadKind desired_string_load_kind) OVERRIDE;

  // Check if the desired_dispatch_info is supported. If it is, return it,
  // otherwise return a fall-back info that should be used instead.
  HInvokeStaticOrDirect::DispatchInfo GetSupportedInvokeStaticOrDirectDispatch(
      const HInvokeStaticOrDirect::DispatchInfo& desired_dispatch_info,
      MethodReference target_method) OVERRIDE;

  void GenerateStaticOrDirectCall(HInvokeStaticOrDirect* invoke, Location temp) OVERRIDE;
  void GenerateVirtualCall(HInvokeVirtual* invoke, Location temp) OVERRIDE;

  void MoveFromReturnRegister(Location trg ATTRIBUTE_UNUSED,
                              Primitive::Type type ATTRIBUTE_UNUSED) OVERRIDE {
    UNIMPLEMENTED(FATAL) << "Not implemented on MIPS64";
  }

  void GenerateNop();
  void GenerateImplicitNullCheck(HNullCheck* instruction);
  void GenerateExplicitNullCheck(HNullCheck* instruction);

 private:
  // Labels for each block that will be compiled.
  Mips64Label* block_labels_;  // Indexed by block id.
  Mips64Label frame_entry_label_;
  LocationsBuilderMIPS64 location_builder_;
  InstructionCodeGeneratorMIPS64 instruction_visitor_;
  ParallelMoveResolverMIPS64 move_resolver_;
  Mips64Assembler assembler_;
  const Mips64InstructionSetFeatures& isa_features_;

  DISALLOW_COPY_AND_ASSIGN(CodeGeneratorMIPS64);
};

}  // namespace mips64
}  // namespace art

#endif  // ART_COMPILER_OPTIMIZING_CODE_GENERATOR_MIPS64_H_