codegen_arm64.h revision 23abec955e2e733999a1e2c30e4e384e46e5dde4
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_DEX_QUICK_ARM64_CODEGEN_ARM64_H_
#define ART_COMPILER_DEX_QUICK_ARM64_CODEGEN_ARM64_H_

#include "arm64_lir.h"
#include "dex/compiler_internals.h"

#include <map>

namespace art {

class Arm64Mir2Lir FINAL : public Mir2Lir {
 protected:
  // TODO: consolidate 64-bit target support.
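  // The mapper classes below assign incoming Dalvik arguments ("ins") to physical
  // argument registers: core registers for integer/reference values and FP registers
  // for float/double values. Arguments that do not receive a register are recorded as
  // stack-mapped by InToRegStorageMapping.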
  class InToRegStorageMapper {
   public:
    virtual RegStorage GetNextReg(bool is_double_or_float, bool is_wide, bool is_ref) = 0;
    virtual ~InToRegStorageMapper() {}
  };

  class InToRegStorageArm64Mapper : public InToRegStorageMapper {
   public:
    InToRegStorageArm64Mapper() : cur_core_reg_(0), cur_fp_reg_(0) {}
    virtual ~InToRegStorageArm64Mapper() {}
    virtual RegStorage GetNextReg(bool is_double_or_float, bool is_wide, bool is_ref);
   private:
    int cur_core_reg_;
    int cur_fp_reg_;
  };

  class InToRegStorageMapping {
   public:
    InToRegStorageMapping() : max_mapped_in_(0), is_there_stack_mapped_(false),
    initialized_(false) {}
    void Initialize(RegLocation* arg_locs, int count, InToRegStorageMapper* mapper);
    int GetMaxMappedIn() { return max_mapped_in_; }
    bool IsThereStackMapped() { return is_there_stack_mapped_; }
    RegStorage Get(int in_position);
    bool IsInitialized() { return initialized_; }
   private:
    std::map<int, RegStorage> mapping_;
    int max_mapped_in_;
    bool is_there_stack_mapped_;
    bool initialized_;
  };

  public:
    Arm64Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena);

    // Required for target - codegen helpers.
    bool SmallLiteralDivRem(Instruction::Code dalvik_opcode, bool is_div, RegLocation rl_src,
                            RegLocation rl_dest, int lit) OVERRIDE;
    bool HandleEasyDivRem(Instruction::Code dalvik_opcode, bool is_div,
                          RegLocation rl_src, RegLocation rl_dest, int lit) OVERRIDE;
    bool EasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit) OVERRIDE;
    LIR* CheckSuspendUsingLoad() OVERRIDE;
    RegStorage LoadHelper(ThreadOffset<4> offset) OVERRIDE;
    RegStorage LoadHelper(ThreadOffset<8> offset) OVERRIDE;
    LIR* LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest,
                      OpSize size, VolatileKind is_volatile) OVERRIDE;
    LIR* LoadRefDisp(RegStorage r_base, int displacement, RegStorage r_dest,
                     VolatileKind is_volatile) OVERRIDE;
    LIR* LoadBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest, int scale,
                         OpSize size) OVERRIDE;
    LIR* LoadRefIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest) OVERRIDE;
    LIR* LoadBaseIndexedDisp(RegStorage r_base, RegStorage r_index, int scale, int displacement,
                             RegStorage r_dest, OpSize size) OVERRIDE;
    LIR* LoadConstantNoClobber(RegStorage r_dest, int value);
    LIR* LoadConstantWide(RegStorage r_dest, int64_t value);
    LIR* StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src,
                       OpSize size, VolatileKind is_volatile) OVERRIDE;
    LIR* StoreRefDisp(RegStorage r_base, int displacement, RegStorage r_src,
                      VolatileKind is_volatile) OVERRIDE;
    LIR* StoreBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src, int scale,
                          OpSize size) OVERRIDE;
    LIR* StoreRefIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src) OVERRIDE;
    LIR* StoreBaseIndexedDisp(RegStorage r_base, RegStorage r_index, int scale, int displacement,
                              RegStorage r_src, OpSize size) OVERRIDE;
    void MarkGCCard(RegStorage val_reg, RegStorage tgt_addr_reg) OVERRIDE;
    LIR* OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg, RegStorage base_reg,
                           int offset, int check_value, LIR* target) OVERRIDE;

    // Required for target - register utilities.
    RegStorage TargetReg(SpecialTargetRegister reg) OVERRIDE;
    RegStorage TargetReg(SpecialTargetRegister symbolic_reg, bool is_wide) OVERRIDE {
      RegStorage reg = TargetReg(symbolic_reg);
      if (is_wide) {
        return (reg.Is64Bit()) ? reg : As64BitReg(reg);
      } else {
        return (reg.Is32Bit()) ? reg : As32BitReg(reg);
      }
    }
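    // The reference and pointer variants below always widen the symbolic register to
    // its 64-bit (X register) view.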
    RegStorage TargetRefReg(SpecialTargetRegister symbolic_reg) OVERRIDE {
      RegStorage reg = TargetReg(symbolic_reg);
      return (reg.Is64Bit() ? reg : As64BitReg(reg));
    }
    RegStorage TargetPtrReg(SpecialTargetRegister symbolic_reg) OVERRIDE {
      RegStorage reg = TargetReg(symbolic_reg);
      return (reg.Is64Bit() ? reg : As64BitReg(reg));
    }
    RegStorage GetArgMappingToPhysicalReg(int arg_num);
    RegLocation GetReturnAlt();
    RegLocation GetReturnWideAlt();
    RegLocation LocCReturn();
    RegLocation LocCReturnRef();
    RegLocation LocCReturnDouble();
    RegLocation LocCReturnFloat();
    RegLocation LocCReturnWide();
    ResourceMask GetRegMaskCommon(const RegStorage& reg) const OVERRIDE;
    void AdjustSpillMask();
    void ClobberCallerSave();
    void FreeCallTemps();
    void LockCallTemps();
    void CompilerInitializeRegAlloc();

    // Required for target - miscellaneous.
    void AssembleLIR();
    uint32_t LinkFixupInsns(LIR* head_lir, LIR* tail_lir, CodeOffset offset);
    int AssignInsnOffsets();
    void AssignOffsets();
    uint8_t* EncodeLIRs(uint8_t* write_pos, LIR* lir);
    void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) OVERRIDE;
    void SetupTargetResourceMasks(LIR* lir, uint64_t flags,
                                  ResourceMask* use_mask, ResourceMask* def_mask) OVERRIDE;
    const char* GetTargetInstFmt(int opcode);
    const char* GetTargetInstName(int opcode);
    std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr);
    ResourceMask GetPCUseDefEncoding() const OVERRIDE;
    uint64_t GetTargetInstFlags(int opcode);
    size_t GetInsnSize(LIR* lir) OVERRIDE;
    bool IsUnconditionalBranch(LIR* lir);

    // Check support for volatile load/store of a given size.
    bool SupportsVolatileLoadStore(OpSize size) OVERRIDE;
    // Get the register class for load/store of a field.
    RegisterClass RegClassForFieldLoadStore(OpSize size, bool is_volatile) OVERRIDE;

    // Required for target - Dalvik-level generators.
    void GenShiftOpLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                        RegLocation lr_shift);
    void GenArithImmOpLong(Instruction::Code opcode, RegLocation rl_dest,
                           RegLocation rl_src1, RegLocation rl_src2);
    void GenArrayGet(int opt_flags, OpSize size, RegLocation rl_array,
                     RegLocation rl_index, RegLocation rl_dest, int scale);
    void GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array, RegLocation rl_index,
                     RegLocation rl_src, int scale, bool card_mark);
    void GenShiftImmOpLong(Instruction::Code opcode, RegLocation rl_dest,
                           RegLocation rl_src1, RegLocation rl_shift);
    void GenLongOp(OpKind op, RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2);
    void GenMulLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                    RegLocation rl_src2);
    void GenAddLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                    RegLocation rl_src2);
    void GenAndLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                    RegLocation rl_src2);
    void GenArithOpDouble(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                          RegLocation rl_src2);
    void GenArithOpFloat(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                         RegLocation rl_src2);
    void GenCmpFP(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                  RegLocation rl_src2);
    void GenConversion(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src);
    bool GenInlinedReverseBits(CallInfo* info, OpSize size);
    bool GenInlinedCas(CallInfo* info, bool is_long, bool is_object);
    bool GenInlinedMinMax(CallInfo* info, bool is_min, bool is_long);
    bool GenInlinedMinMaxFP(CallInfo* info, bool is_min, bool is_double);
    bool GenInlinedSqrt(CallInfo* info);
    bool GenInlinedPeek(CallInfo* info, OpSize size);
    bool GenInlinedPoke(CallInfo* info, OpSize size);
    bool GenInlinedAbsLong(CallInfo* info);
    void GenIntToLong(RegLocation rl_dest, RegLocation rl_src);
    void GenNotLong(RegLocation rl_dest, RegLocation rl_src);
    void GenNegLong(RegLocation rl_dest, RegLocation rl_src);
    void GenOrLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                   RegLocation rl_src2);
    void GenSubLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                    RegLocation rl_src2);
    void GenXorLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                    RegLocation rl_src2);
    void GenDivRemLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                       RegLocation rl_src2, bool is_div);
    RegLocation GenDivRem(RegLocation rl_dest, RegStorage reg_lo, RegStorage reg_hi, bool is_div);
    RegLocation GenDivRemLit(RegLocation rl_dest, RegStorage reg_lo, int lit, bool is_div);
    void GenCmpLong(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2);
    void GenDivZeroCheckWide(RegStorage reg);
    void GenEntrySequence(RegLocation* ArgLocs, RegLocation rl_method);
    void GenExitSequence();
    void GenSpecialExitSequence();
    void GenFillArrayData(DexOffset table_offset, RegLocation rl_src);
    void GenFusedFPCmpBranch(BasicBlock* bb, MIR* mir, bool gt_bias, bool is_double);
    void GenFusedLongCmpBranch(BasicBlock* bb, MIR* mir);
    void GenSelect(BasicBlock* bb, MIR* mir);
    bool GenMemBarrier(MemBarrierKind barrier_kind);
    void GenMonitorEnter(int opt_flags, RegLocation rl_src);
    void GenMonitorExit(int opt_flags, RegLocation rl_src);
    void GenMoveException(RegLocation rl_dest);
    void GenMultiplyByTwoBitMultiplier(RegLocation rl_src, RegLocation rl_result, int lit,
                                       int first_bit, int second_bit);
    void GenNegDouble(RegLocation rl_dest, RegLocation rl_src);
    void GenNegFloat(RegLocation rl_dest, RegLocation rl_src);
    void GenPackedSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src);
    void GenSparseSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src);

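    // Spill/unspill helpers: GenPairWise peels up to two registers at a time from
    // reg_mask so that the Spill*/UnSpill* routines can favor A64 load/store pair
    // (stp/ldp) instructions where possible, emitting the transfers relative to the
    // given base register and offset.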
    uint32_t GenPairWise(uint32_t reg_mask, int* reg1, int* reg2);
    void UnSpillCoreRegs(RegStorage base, int offset, uint32_t reg_mask);
    void SpillCoreRegs(RegStorage base, int offset, uint32_t reg_mask);
    void UnSpillFPRegs(RegStorage base, int offset, uint32_t reg_mask);
    void SpillFPRegs(RegStorage base, int offset, uint32_t reg_mask);

    // Required for target - single operation generators.
    LIR* OpUnconditionalBranch(LIR* target);
    LIR* OpCmpBranch(ConditionCode cond, RegStorage src1, RegStorage src2, LIR* target);
    LIR* OpCmpImmBranch(ConditionCode cond, RegStorage reg, int check_value, LIR* target);
    LIR* OpCondBranch(ConditionCode cc, LIR* target);
    LIR* OpDecAndBranch(ConditionCode c_code, RegStorage reg, LIR* target);
    LIR* OpFpRegCopy(RegStorage r_dest, RegStorage r_src);
    LIR* OpIT(ConditionCode cond, const char* guide);
    void OpEndIT(LIR* it);
    LIR* OpMem(OpKind op, RegStorage r_base, int disp);
    LIR* OpPcRelLoad(RegStorage reg, LIR* target);
    LIR* OpReg(OpKind op, RegStorage r_dest_src);
    void OpRegCopy(RegStorage r_dest, RegStorage r_src);
    LIR* OpRegCopyNoInsert(RegStorage r_dest, RegStorage r_src);
    LIR* OpRegImm64(OpKind op, RegStorage r_dest_src1, int64_t value);
    LIR* OpRegImm(OpKind op, RegStorage r_dest_src1, int value);
    LIR* OpRegMem(OpKind op, RegStorage r_dest, RegStorage r_base, int offset);
    LIR* OpRegReg(OpKind op, RegStorage r_dest_src1, RegStorage r_src2);
    LIR* OpMovRegMem(RegStorage r_dest, RegStorage r_base, int offset, MoveType move_type);
    LIR* OpMovMemReg(RegStorage r_base, int offset, RegStorage r_src, MoveType move_type);
    LIR* OpCondRegReg(OpKind op, ConditionCode cc, RegStorage r_dest, RegStorage r_src);
    LIR* OpRegRegImm64(OpKind op, RegStorage r_dest, RegStorage r_src1, int64_t value);
    LIR* OpRegRegImm(OpKind op, RegStorage r_dest, RegStorage r_src1, int value);
    LIR* OpRegRegReg(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2);
    LIR* OpTestSuspend(LIR* target);
    LIR* OpThreadMem(OpKind op, ThreadOffset<4> thread_offset) OVERRIDE;
    LIR* OpThreadMem(OpKind op, ThreadOffset<8> thread_offset) OVERRIDE;
    LIR* OpVldm(RegStorage r_base, int count);
    LIR* OpVstm(RegStorage r_base, int count);
    void OpLea(RegStorage r_base, RegStorage reg1, RegStorage reg2, int scale, int offset);
    void OpRegCopyWide(RegStorage dest, RegStorage src);
    void OpTlsCmp(ThreadOffset<4> offset, int val) OVERRIDE;
    void OpTlsCmp(ThreadOffset<8> offset, int val) OVERRIDE;

    LIR* LoadBaseDispBody(RegStorage r_base, int displacement, RegStorage r_dest, OpSize size);
    LIR* StoreBaseDispBody(RegStorage r_base, int displacement, RegStorage r_src, OpSize size);
    LIR* OpRegRegRegShift(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2,
                          int shift);
    LIR* OpRegRegRegExtend(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2,
                           A64RegExtEncodings ext, uint8_t amount);
    LIR* OpRegRegShift(OpKind op, RegStorage r_dest_src1, RegStorage r_src2, int shift);
    LIR* OpRegRegExtend(OpKind op, RegStorage r_dest_src1, RegStorage r_src2, int shift);
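    // EncodingMap is the per-opcode encoding table used by the assembler; the
    // Encode*/Decode* helpers below pack and unpack A64 operand fields such as
    // shifted/extended register operands and logical (bitmask) immediates.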
    static const ArmEncodingMap EncodingMap[kA64Last];
    int EncodeShift(int code, int amount);
    int EncodeExtend(int extend_type, int amount);
    bool IsExtendEncoding(int encoded_value);
    int EncodeLogicalImmediate(bool is_wide, uint64_t value);
    uint64_t DecodeLogicalImmediate(bool is_wide, int value);

    ArmConditionCode ArmConditionEncoding(ConditionCode code);
    bool InexpensiveConstantInt(int32_t value);
    bool InexpensiveConstantFloat(int32_t value);
    bool InexpensiveConstantLong(int64_t value);
    bool InexpensiveConstantDouble(int64_t value);

    void FlushIns(RegLocation* ArgLocs, RegLocation rl_method);

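    // Argument set-up for invokes: the NoRange variant handles invoke instructions
    // with explicit (up to five) arguments, the Range variant handles invoke-*/range.
    // Both return the updated call_state used to drive the rest of the call sequence.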
    int GenDalvikArgsNoRange(CallInfo* info, int call_state, LIR** pcrLabel,
                             NextCallInsn next_call_insn,
                             const MethodReference& target_method,
                             uint32_t vtable_idx,
                             uintptr_t direct_code, uintptr_t direct_method, InvokeType type,
                             bool skip_this);

    int GenDalvikArgsRange(CallInfo* info, int call_state, LIR** pcrLabel,
                           NextCallInsn next_call_insn,
                           const MethodReference& target_method,
                           uint32_t vtable_idx,
                           uintptr_t direct_code, uintptr_t direct_method, InvokeType type,
                           bool skip_this);
    InToRegStorageMapping in_to_reg_storage_mapping_;

  private:
    /**
     * @brief Given register xNN (dNN), returns register wNN (sNN).
     * @param reg #RegStorage containing a Solo64 input register (e.g. @c x1 or @c d2).
     * @return A Solo32 with the same register number as the @p reg (e.g. @c w1 or @c s2).
     * @see As64BitReg
     */
    RegStorage As32BitReg(RegStorage reg) {
      DCHECK(!reg.IsPair());
      if ((kFailOnSizeError || kReportSizeError) && !reg.Is64Bit()) {
        if (kFailOnSizeError) {
          LOG(FATAL) << "Expected 64b register";
        } else {
          LOG(WARNING) << "Expected 64b register";
          return reg;
        }
      }
      RegStorage ret_val = RegStorage(RegStorage::k32BitSolo,
                                      reg.GetRawBits() & RegStorage::kRegTypeMask);
      DCHECK_EQ(GetRegInfo(reg)->FindMatchingView(RegisterInfo::k32SoloStorageMask)
                               ->GetReg().GetReg(),
                ret_val.GetReg());
      return ret_val;
    }

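    // Verifies that reg is a 32-bit (W/S) register; depending on kFailOnSizeError /
    // kReportSizeError it aborts, warns and narrows, or passes the register through.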
    RegStorage Check32BitReg(RegStorage reg) {
      if ((kFailOnSizeError || kReportSizeError) && !reg.Is32Bit()) {
        if (kFailOnSizeError) {
          LOG(FATAL) << "Checked for 32b register";
        } else {
          LOG(WARNING) << "Checked for 32b register";
          return As32BitReg(reg);
        }
      }
      return reg;
    }

    /**
     * @brief Given register wNN (sNN), returns register xNN (dNN).
     * @param reg #RegStorage containing a Solo32 input register (e.g. @c w1 or @c s2).
     * @return A Solo64 with the same register number as the @p reg (e.g. @c x1 or @c d2).
     * @see As32BitReg
     */
    RegStorage As64BitReg(RegStorage reg) {
      DCHECK(!reg.IsPair());
      if ((kFailOnSizeError || kReportSizeError) && !reg.Is32Bit()) {
        if (kFailOnSizeError) {
          LOG(FATAL) << "Expected 32b register";
        } else {
          LOG(WARNING) << "Expected 32b register";
          return reg;
        }
      }
      RegStorage ret_val = RegStorage(RegStorage::k64BitSolo,
                                      reg.GetRawBits() & RegStorage::kRegTypeMask);
      DCHECK_EQ(GetRegInfo(reg)->FindMatchingView(RegisterInfo::k64SoloStorageMask)
                               ->GetReg().GetReg(),
                ret_val.GetReg());
      return ret_val;
    }

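    // Counterpart of Check32BitReg: verifies that reg is a 64-bit (X/D) register,
    // widening it (or aborting) when size checking is enabled.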
    RegStorage Check64BitReg(RegStorage reg) {
      if ((kFailOnSizeError || kReportSizeError) && !reg.Is64Bit()) {
        if (kFailOnSizeError) {
          LOG(FATAL) << "Checked for 64b register";
        } else {
          LOG(WARNING) << "Checked for 64b register";
          return As64BitReg(reg);
        }
      }
      return reg;
    }

    LIR* LoadFPConstantValue(RegStorage r_dest, int32_t value);
    LIR* LoadFPConstantValueWide(RegStorage r_dest, int64_t value);
    void ReplaceFixup(LIR* prev_lir, LIR* orig_lir, LIR* new_lir);
    void InsertFixupBefore(LIR* prev_lir, LIR* orig_lir, LIR* new_lir);
    void AssignDataOffsets();
    RegLocation GenDivRem(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2,
                          bool is_div, bool check_zero);
    RegLocation GenDivRemLit(RegLocation rl_dest, RegLocation rl_src1, int lit, bool is_div);
};

}  // namespace art

#endif  // ART_COMPILER_DEX_QUICK_ARM64_CODEGEN_ARM64_H_