// codegen_arm64.h revision 63fe93d9f9d2956b1ee2b98cdd6ddd2153f5f9cf
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#ifndef ART_COMPILER_DEX_QUICK_ARM64_CODEGEN_ARM64_H_
18#define ART_COMPILER_DEX_QUICK_ARM64_CODEGEN_ARM64_H_
19
20#include "arm64_lir.h"
21#include "dex/compiler_internals.h"
22
23#include <map>
24
25namespace art {
26
/*
 * ARM64 (A64) backend of the Quick compiler's Mir2Lir code generator.
 * Declares the target-specific overrides and helpers (instruction selection,
 * register views, call-argument mapping, assembly/fixup); the implementations
 * live in the accompanying arm64 .cc files, which are not visible here.
 */
class Arm64Mir2Lir FINAL : public Mir2Lir {
 protected:
  // TODO: consolidate 64-bit target support.
  // Strategy interface: yields the physical register that should hold the next
  // incoming method argument, given the argument's type properties.
  class InToRegStorageMapper {
   public:
    virtual RegStorage GetNextReg(bool is_double_or_float, bool is_wide, bool is_ref) = 0;
    virtual ~InToRegStorageMapper() {}
  };

  // A64 argument mapper: keeps independent cursors for the core and FP
  // argument-register sequences.
  class InToRegStorageArm64Mapper : public InToRegStorageMapper {
   public:
    InToRegStorageArm64Mapper() : cur_core_reg_(0), cur_fp_reg_(0) {}
    virtual ~InToRegStorageArm64Mapper() {}
    virtual RegStorage GetNextReg(bool is_double_or_float, bool is_wide, bool is_ref);
   private:
    int cur_core_reg_;  // Index of the next core (integer) argument register to hand out.
    int cur_fp_reg_;    // Index of the next floating-point argument register to hand out.
  };

  // Computed mapping from "in" argument positions to physical registers.
  // Positions with no register mapping are considered stack-passed
  // (see IsThereStackMapped()). Must be Initialize()d before use.
  class InToRegStorageMapping {
   public:
    InToRegStorageMapping() : max_mapped_in_(0), is_there_stack_mapped_(false),
    initialized_(false) {}
    void Initialize(RegLocation* arg_locs, int count, InToRegStorageMapper* mapper);
    // Highest in-position that received a register mapping.
    int GetMaxMappedIn() { return max_mapped_in_; }
    bool IsThereStackMapped() { return is_there_stack_mapped_; }
    // Register for the given in-position; see the .cc for behavior on unmapped positions.
    RegStorage Get(int in_position);
    bool IsInitialized() { return initialized_; }
   private:
    std::map<int, RegStorage> mapping_;  // in-position -> physical register.
    int max_mapped_in_;
    bool is_there_stack_mapped_;
    bool initialized_;
  };

  public:
    Arm64Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena);

    // Required for target - codegen helpers.
    bool SmallLiteralDivRem(Instruction::Code dalvik_opcode, bool is_div, RegLocation rl_src,
                            RegLocation rl_dest, int lit) OVERRIDE;
    bool HandleEasyDivRem(Instruction::Code dalvik_opcode, bool is_div,
                          RegLocation rl_src, RegLocation rl_dest, int lit) OVERRIDE;
    bool EasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit) OVERRIDE;
    LIR* CheckSuspendUsingLoad() OVERRIDE;
    // Two overloads to cover 32-bit and 64-bit thread-pointer offsets.
    RegStorage LoadHelper(ThreadOffset<4> offset) OVERRIDE;
    RegStorage LoadHelper(ThreadOffset<8> offset) OVERRIDE;
    LIR* LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest,
                      OpSize size, VolatileKind is_volatile) OVERRIDE;
    LIR* LoadRefDisp(RegStorage r_base, int displacement, RegStorage r_dest,
                     VolatileKind is_volatile)
        OVERRIDE;
    LIR* LoadBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest, int scale,
                         OpSize size) OVERRIDE;
    LIR* LoadRefIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest) OVERRIDE;
    LIR* LoadBaseIndexedDisp(RegStorage r_base, RegStorage r_index, int scale, int displacement,
                             RegStorage r_dest, OpSize size) OVERRIDE;
    LIR* LoadConstantNoClobber(RegStorage r_dest, int value);
    LIR* LoadConstantWide(RegStorage r_dest, int64_t value);
    LIR* StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src,
                       OpSize size, VolatileKind is_volatile) OVERRIDE;
    LIR* StoreRefDisp(RegStorage r_base, int displacement, RegStorage r_src,
                      VolatileKind is_volatile) OVERRIDE;
    LIR* StoreBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src, int scale,
                          OpSize size) OVERRIDE;
    LIR* StoreRefIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src) OVERRIDE;
    LIR* StoreBaseIndexedDisp(RegStorage r_base, RegStorage r_index, int scale, int displacement,
                              RegStorage r_src, OpSize size) OVERRIDE;
    void MarkGCCard(RegStorage val_reg, RegStorage tgt_addr_reg) OVERRIDE;
    LIR* OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg, RegStorage base_reg,
                           int offset, int check_value, LIR* target) OVERRIDE;

    // Required for target - register utilities.
    RegStorage TargetReg(SpecialTargetRegister reg) OVERRIDE;
    // As TargetReg(reg), but coerced to the requested width: the 64-bit view
    // when is_wide, otherwise the 32-bit view.
    RegStorage TargetReg(SpecialTargetRegister symbolic_reg, bool is_wide) OVERRIDE {
      RegStorage reg = TargetReg(symbolic_reg);
      if (is_wide) {
        return (reg.Is64Bit()) ? reg : As64BitReg(reg);
      } else {
        return (reg.Is32Bit()) ? reg : As32BitReg(reg);
      }
    }
    // Register for holding an object reference: always the 64-bit view on A64.
    RegStorage TargetRefReg(SpecialTargetRegister symbolic_reg) OVERRIDE {
      RegStorage reg = TargetReg(symbolic_reg);
      return (reg.Is64Bit() ? reg : As64BitReg(reg));
    }
    // Register for holding a native pointer: always the 64-bit view on A64.
    RegStorage TargetPtrReg(SpecialTargetRegister symbolic_reg) OVERRIDE {
      RegStorage reg = TargetReg(symbolic_reg);
      return (reg.Is64Bit() ? reg : As64BitReg(reg));
    }
    RegStorage GetArgMappingToPhysicalReg(int arg_num);
    RegLocation GetReturnAlt();
    RegLocation GetReturnWideAlt();
    RegLocation LocCReturn();
    RegLocation LocCReturnRef();
    RegLocation LocCReturnDouble();
    RegLocation LocCReturnFloat();
    RegLocation LocCReturnWide();
    ResourceMask GetRegMaskCommon(const RegStorage& reg) const OVERRIDE;
    void AdjustSpillMask();
    void ClobberCallerSave();
    void FreeCallTemps();
    void LockCallTemps();
    void CompilerInitializeRegAlloc();

    // Required for target - miscellaneous (assembly, fixups, debug dumps).
    void AssembleLIR();
    uint32_t LinkFixupInsns(LIR* head_lir, LIR* tail_lir, CodeOffset offset);
    int AssignInsnOffsets();
    void AssignOffsets();
    uint8_t* EncodeLIRs(uint8_t* write_pos, LIR* lir);
    void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) OVERRIDE;
    void SetupTargetResourceMasks(LIR* lir, uint64_t flags,
                                  ResourceMask* use_mask, ResourceMask* def_mask) OVERRIDE;
    const char* GetTargetInstFmt(int opcode);
    const char* GetTargetInstName(int opcode);
    std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr);
    ResourceMask GetPCUseDefEncoding() const OVERRIDE;
    uint64_t GetTargetInstFlags(int opcode);
    size_t GetInsnSize(LIR* lir) OVERRIDE;
    bool IsUnconditionalBranch(LIR* lir);

    // Check support for volatile load/store of a given size.
    bool SupportsVolatileLoadStore(OpSize size) OVERRIDE;
    // Get the register class for load/store of a field.
    RegisterClass RegClassForFieldLoadStore(OpSize size, bool is_volatile) OVERRIDE;

    // Required for target - Dalvik-level generators.
    void GenShiftOpLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                        RegLocation lr_shift);
    void GenArithImmOpLong(Instruction::Code opcode, RegLocation rl_dest,
                           RegLocation rl_src1, RegLocation rl_src2);
    void GenArrayGet(int opt_flags, OpSize size, RegLocation rl_array,
                     RegLocation rl_index, RegLocation rl_dest, int scale);
    void GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array, RegLocation rl_index,
                     RegLocation rl_src, int scale, bool card_mark);
    void GenShiftImmOpLong(Instruction::Code opcode, RegLocation rl_dest,
                           RegLocation rl_src1, RegLocation rl_shift);
    void GenLongOp(OpKind op, RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2);
    void GenMulLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                    RegLocation rl_src2);
    void GenAddLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                    RegLocation rl_src2);
    void GenAndLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                    RegLocation rl_src2);
    void GenArithOpDouble(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                          RegLocation rl_src2);
    void GenArithOpFloat(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                         RegLocation rl_src2);
    void GenCmpFP(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                  RegLocation rl_src2);
    void GenConversion(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src);
    bool GenInlinedReverseBits(CallInfo* info, OpSize size);
    bool GenInlinedAbsDouble(CallInfo* info) OVERRIDE;
    bool GenInlinedCas(CallInfo* info, bool is_long, bool is_object);
    bool GenInlinedMinMax(CallInfo* info, bool is_min, bool is_long);
    bool GenInlinedMinMaxFP(CallInfo* info, bool is_min, bool is_double);
    bool GenInlinedSqrt(CallInfo* info);
    bool GenInlinedPeek(CallInfo* info, OpSize size);
    bool GenInlinedPoke(CallInfo* info, OpSize size);
    bool GenInlinedAbsLong(CallInfo* info);
    void GenIntToLong(RegLocation rl_dest, RegLocation rl_src);
    void GenNotLong(RegLocation rl_dest, RegLocation rl_src);
    void GenNegLong(RegLocation rl_dest, RegLocation rl_src);
    void GenOrLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                   RegLocation rl_src2);
    void GenSubLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                    RegLocation rl_src2);
    void GenXorLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                    RegLocation rl_src2);
    void GenDivRemLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1,
                       RegLocation rl_src2, bool is_div);
    RegLocation GenDivRem(RegLocation rl_dest, RegStorage reg_lo, RegStorage reg_hi, bool is_div);
    RegLocation GenDivRemLit(RegLocation rl_dest, RegStorage reg_lo, int lit, bool is_div);
    void GenCmpLong(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2);
    void GenDivZeroCheckWide(RegStorage reg);
    void GenEntrySequence(RegLocation* ArgLocs, RegLocation rl_method);
    void GenExitSequence();
    void GenSpecialExitSequence();
    void GenFillArrayData(DexOffset table_offset, RegLocation rl_src);
    void GenFusedFPCmpBranch(BasicBlock* bb, MIR* mir, bool gt_bias, bool is_double);
    void GenFusedLongCmpBranch(BasicBlock* bb, MIR* mir);
    void GenSelect(BasicBlock* bb, MIR* mir);
    bool GenMemBarrier(MemBarrierKind barrier_kind);
    void GenMonitorEnter(int opt_flags, RegLocation rl_src);
    void GenMonitorExit(int opt_flags, RegLocation rl_src);
    void GenMoveException(RegLocation rl_dest);
    void GenMultiplyByTwoBitMultiplier(RegLocation rl_src, RegLocation rl_result, int lit,
                                       int first_bit, int second_bit);
    void GenNegDouble(RegLocation rl_dest, RegLocation rl_src);
    void GenNegFloat(RegLocation rl_dest, RegLocation rl_src);
    void GenPackedSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src);
    void GenSparseSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src);

    // Frame spill/unspill helpers. reg_mask is a bit mask of register numbers;
    // NOTE(review): GenPairWise appears to peel off one or two registers at a
    // time for paired ldp/stp emission — confirm against the .cc implementation.
    uint32_t GenPairWise(uint32_t reg_mask, int* reg1, int* reg2);
    void UnSpillCoreRegs(RegStorage base, int offset, uint32_t reg_mask);
    void SpillCoreRegs(RegStorage base, int offset, uint32_t reg_mask);
    void UnSpillFPRegs(RegStorage base, int offset, uint32_t reg_mask);
    void SpillFPRegs(RegStorage base, int offset, uint32_t reg_mask);

    // Required for target - single operation generators.
    LIR* OpUnconditionalBranch(LIR* target);
    LIR* OpCmpBranch(ConditionCode cond, RegStorage src1, RegStorage src2, LIR* target);
    LIR* OpCmpImmBranch(ConditionCode cond, RegStorage reg, int check_value, LIR* target);
    LIR* OpCondBranch(ConditionCode cc, LIR* target);
    LIR* OpDecAndBranch(ConditionCode c_code, RegStorage reg, LIR* target);
    LIR* OpFpRegCopy(RegStorage r_dest, RegStorage r_src);
    LIR* OpIT(ConditionCode cond, const char* guide);
    void OpEndIT(LIR* it);
    LIR* OpMem(OpKind op, RegStorage r_base, int disp);
    LIR* OpPcRelLoad(RegStorage reg, LIR* target);
    LIR* OpReg(OpKind op, RegStorage r_dest_src);
    void OpRegCopy(RegStorage r_dest, RegStorage r_src);
    LIR* OpRegCopyNoInsert(RegStorage r_dest, RegStorage r_src);
    LIR* OpRegImm64(OpKind op, RegStorage r_dest_src1, int64_t value);
    LIR* OpRegImm(OpKind op, RegStorage r_dest_src1, int value);
    LIR* OpRegMem(OpKind op, RegStorage r_dest, RegStorage r_base, int offset);
    LIR* OpRegReg(OpKind op, RegStorage r_dest_src1, RegStorage r_src2);
    LIR* OpMovRegMem(RegStorage r_dest, RegStorage r_base, int offset, MoveType move_type);
    LIR* OpMovMemReg(RegStorage r_base, int offset, RegStorage r_src, MoveType move_type);
    LIR* OpCondRegReg(OpKind op, ConditionCode cc, RegStorage r_dest, RegStorage r_src);
    LIR* OpRegRegImm64(OpKind op, RegStorage r_dest, RegStorage r_src1, int64_t value);
    LIR* OpRegRegImm(OpKind op, RegStorage r_dest, RegStorage r_src1, int value);
    LIR* OpRegRegReg(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2);
    LIR* OpTestSuspend(LIR* target);
    LIR* OpThreadMem(OpKind op, ThreadOffset<4> thread_offset) OVERRIDE;
    LIR* OpThreadMem(OpKind op, ThreadOffset<8> thread_offset) OVERRIDE;
    LIR* OpVldm(RegStorage r_base, int count);
    LIR* OpVstm(RegStorage r_base, int count);
    void OpLea(RegStorage r_base, RegStorage reg1, RegStorage reg2, int scale, int offset);
    void OpRegCopyWide(RegStorage dest, RegStorage src);
    void OpTlsCmp(ThreadOffset<4> offset, int val) OVERRIDE;
    void OpTlsCmp(ThreadOffset<8> offset, int val) OVERRIDE;

    LIR* LoadBaseDispBody(RegStorage r_base, int displacement, RegStorage r_dest, OpSize size);
    LIR* StoreBaseDispBody(RegStorage r_base, int displacement, RegStorage r_src, OpSize size);
    // Three-operand / two-operand forms with an explicit shift or extend on the
    // final source operand (A64 shifted/extended-register encodings).
    LIR* OpRegRegRegShift(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2,
                          int shift);
    LIR* OpRegRegRegExtend(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2,
                           A64RegExtEncodings ext, uint8_t amount);
    LIR* OpRegRegShift(OpKind op, RegStorage r_dest_src1, RegStorage r_src2, int shift);
    LIR* OpRegRegExtend(OpKind op, RegStorage r_dest_src1, RegStorage r_src2, int shift);
    // Static encoding table indexed by A64 opcode.
    static const ArmEncodingMap EncodingMap[kA64Last];
    int EncodeShift(int code, int amount);
    int EncodeExtend(int extend_type, int amount);
    bool IsExtendEncoding(int encoded_value);
    // A64 "logical immediate" (bitmask immediate) encode/decode helpers.
    int EncodeLogicalImmediate(bool is_wide, uint64_t value);
    uint64_t DecodeLogicalImmediate(bool is_wide, int value);

    ArmConditionCode ArmConditionEncoding(ConditionCode code);
    // Cost queries: whether a constant can be materialized cheaply inline.
    bool InexpensiveConstantInt(int32_t value);
    bool InexpensiveConstantFloat(int32_t value);
    bool InexpensiveConstantLong(int64_t value);
    bool InexpensiveConstantDouble(int64_t value);

    void FlushIns(RegLocation* ArgLocs, RegLocation rl_method);

    int GenDalvikArgsNoRange(CallInfo* info, int call_state, LIR** pcrLabel,
                             NextCallInsn next_call_insn,
                             const MethodReference& target_method,
                             uint32_t vtable_idx,
                             uintptr_t direct_code, uintptr_t direct_method, InvokeType type,
                             bool skip_this);

    int GenDalvikArgsRange(CallInfo* info, int call_state, LIR** pcrLabel,
                           NextCallInsn next_call_insn,
                           const MethodReference& target_method,
                           uint32_t vtable_idx,
                           uintptr_t direct_code, uintptr_t direct_method, InvokeType type,
                           bool skip_this);
    // Current in-argument -> register mapping for the method being compiled.
    InToRegStorageMapping in_to_reg_storage_mapping_;

  private:
    /**
     * @brief Given register xNN (dNN), returns register wNN (sNN).
     * @param reg #RegStorage containing a Solo64 input register (e.g. @c x1 or @c d2).
     * @return A Solo32 with the same register number as the @p reg (e.g. @c w1 or @c s2).
     * @see As64BitReg
     */
    RegStorage As32BitReg(RegStorage reg) {
      DCHECK(!reg.IsPair());
      // Size-mismatch policy: fatal in strict builds, warn-and-pass-through otherwise.
      if ((kFailOnSizeError || kReportSizeError) && !reg.Is64Bit()) {
        if (kFailOnSizeError) {
          LOG(FATAL) << "Expected 64b register";
        } else {
          LOG(WARNING) << "Expected 64b register";
          return reg;
        }
      }
      RegStorage ret_val = RegStorage(RegStorage::k32BitSolo,
                                      reg.GetRawBits() & RegStorage::kRegTypeMask);
      // Sanity-check against the register-info table's own 32-bit view.
      DCHECK_EQ(GetRegInfo(reg)->FindMatchingView(RegisterInfo::k32SoloStorageMask)
                               ->GetReg().GetReg(),
                ret_val.GetReg());
      return ret_val;
    }

    // Asserts (per the size-error flags) that reg is already a 32-bit solo;
    // in warn-only builds, converts it with As32BitReg instead of failing.
    RegStorage Check32BitReg(RegStorage reg) {
      if ((kFailOnSizeError || kReportSizeError) && !reg.Is32Bit()) {
        if (kFailOnSizeError) {
          LOG(FATAL) << "Checked for 32b register";
        } else {
          LOG(WARNING) << "Checked for 32b register";
          return As32BitReg(reg);
        }
      }
      return reg;
    }

    /**
     * @brief Given register wNN (sNN), returns register xNN (dNN).
     * @param reg #RegStorage containing a Solo32 input register (e.g. @c w1 or @c s2).
     * @return A Solo64 with the same register number as the @p reg (e.g. @c x1 or @c d2).
     * @see As32BitReg
     */
    RegStorage As64BitReg(RegStorage reg) {
      DCHECK(!reg.IsPair());
      // Size-mismatch policy: fatal in strict builds, warn-and-pass-through otherwise.
      if ((kFailOnSizeError || kReportSizeError) && !reg.Is32Bit()) {
        if (kFailOnSizeError) {
          LOG(FATAL) << "Expected 32b register";
        } else {
          LOG(WARNING) << "Expected 32b register";
          return reg;
        }
      }
      RegStorage ret_val = RegStorage(RegStorage::k64BitSolo,
                                      reg.GetRawBits() & RegStorage::kRegTypeMask);
      // Sanity-check against the register-info table's own 64-bit view.
      DCHECK_EQ(GetRegInfo(reg)->FindMatchingView(RegisterInfo::k64SoloStorageMask)
                               ->GetReg().GetReg(),
                ret_val.GetReg());
      return ret_val;
    }

    // Asserts (per the size-error flags) that reg is already a 64-bit solo;
    // in warn-only builds, converts it with As64BitReg instead of failing.
    RegStorage Check64BitReg(RegStorage reg) {
      if ((kFailOnSizeError || kReportSizeError) && !reg.Is64Bit()) {
        if (kFailOnSizeError) {
          LOG(FATAL) << "Checked for 64b register";
        } else {
          LOG(WARNING) << "Checked for 64b register";
          return As64BitReg(reg);
        }
      }
      return reg;
    }

    LIR* LoadFPConstantValue(RegStorage r_dest, int32_t value);
    LIR* LoadFPConstantValueWide(RegStorage r_dest, int64_t value);
    void ReplaceFixup(LIR* prev_lir, LIR* orig_lir, LIR* new_lir);
    void InsertFixupBefore(LIR* prev_lir, LIR* orig_lir, LIR* new_lir);
    void AssignDataOffsets();
    RegLocation GenDivRem(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2,
                          bool is_div, bool check_zero);
    RegLocation GenDivRemLit(RegLocation rl_dest, RegLocation rl_src1, int lit, bool is_div);
};
381
382}  // namespace art
383
384#endif  // ART_COMPILER_DEX_QUICK_ARM64_CODEGEN_ARM64_H_
385