// codegen_arm64.h — revision 63999683329612292d534e6be09dbde9480f1250
1/* 2 * Copyright (C) 2011 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#ifndef ART_COMPILER_DEX_QUICK_ARM64_CODEGEN_ARM64_H_ 18#define ART_COMPILER_DEX_QUICK_ARM64_CODEGEN_ARM64_H_ 19 20#include "arm64_lir.h" 21#include "dex/compiler_internals.h" 22 23#include <map> 24 25namespace art { 26 27class Arm64Mir2Lir FINAL : public Mir2Lir { 28 protected: 29 // TODO: consolidate 64-bit target support. 30 class InToRegStorageMapper { 31 public: 32 virtual RegStorage GetNextReg(bool is_double_or_float, bool is_wide, bool is_ref) = 0; 33 virtual ~InToRegStorageMapper() {} 34 }; 35 36 class InToRegStorageArm64Mapper : public InToRegStorageMapper { 37 public: 38 InToRegStorageArm64Mapper() : cur_core_reg_(0), cur_fp_reg_(0) {} 39 virtual ~InToRegStorageArm64Mapper() {} 40 virtual RegStorage GetNextReg(bool is_double_or_float, bool is_wide, bool is_ref); 41 private: 42 int cur_core_reg_; 43 int cur_fp_reg_; 44 }; 45 46 class InToRegStorageMapping { 47 public: 48 InToRegStorageMapping() : max_mapped_in_(0), is_there_stack_mapped_(false), 49 initialized_(false) {} 50 void Initialize(RegLocation* arg_locs, int count, InToRegStorageMapper* mapper); 51 int GetMaxMappedIn() { return max_mapped_in_; } 52 bool IsThereStackMapped() { return is_there_stack_mapped_; } 53 RegStorage Get(int in_position); 54 bool IsInitialized() { return initialized_; } 55 private: 56 std::map<int, RegStorage> mapping_; 57 int max_mapped_in_; 58 
bool is_there_stack_mapped_; 59 bool initialized_; 60 }; 61 62 public: 63 Arm64Mir2Lir(CompilationUnit* cu, MIRGraph* mir_graph, ArenaAllocator* arena); 64 65 // Required for target - codegen helpers. 66 bool SmallLiteralDivRem(Instruction::Code dalvik_opcode, bool is_div, RegLocation rl_src, 67 RegLocation rl_dest, int lit) OVERRIDE; 68 bool SmallLiteralDivRem64(Instruction::Code dalvik_opcode, bool is_div, RegLocation rl_src, 69 RegLocation rl_dest, int64_t lit); 70 bool HandleEasyDivRem(Instruction::Code dalvik_opcode, bool is_div, 71 RegLocation rl_src, RegLocation rl_dest, int lit) OVERRIDE; 72 bool HandleEasyDivRem64(Instruction::Code dalvik_opcode, bool is_div, 73 RegLocation rl_src, RegLocation rl_dest, int64_t lit); 74 bool EasyMultiply(RegLocation rl_src, RegLocation rl_dest, int lit) OVERRIDE; 75 LIR* CheckSuspendUsingLoad() OVERRIDE; 76 RegStorage LoadHelper(QuickEntrypointEnum trampoline) OVERRIDE; 77 LIR* LoadBaseDisp(RegStorage r_base, int displacement, RegStorage r_dest, 78 OpSize size, VolatileKind is_volatile) OVERRIDE; 79 LIR* LoadRefDisp(RegStorage r_base, int displacement, RegStorage r_dest, 80 VolatileKind is_volatile) 81 OVERRIDE; 82 LIR* LoadBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest, int scale, 83 OpSize size) OVERRIDE; 84 LIR* LoadRefIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_dest, int scale) 85 OVERRIDE; 86 LIR* LoadConstantNoClobber(RegStorage r_dest, int value); 87 LIR* LoadConstantWide(RegStorage r_dest, int64_t value); 88 LIR* StoreBaseDisp(RegStorage r_base, int displacement, RegStorage r_src, 89 OpSize size, VolatileKind is_volatile) OVERRIDE; 90 LIR* StoreRefDisp(RegStorage r_base, int displacement, RegStorage r_src, 91 VolatileKind is_volatile) OVERRIDE; 92 LIR* StoreBaseIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src, int scale, 93 OpSize size) OVERRIDE; 94 LIR* StoreRefIndexed(RegStorage r_base, RegStorage r_index, RegStorage r_src, int scale) 95 OVERRIDE; 96 void 
MarkGCCard(RegStorage val_reg, RegStorage tgt_addr_reg) OVERRIDE; 97 LIR* OpCmpMemImmBranch(ConditionCode cond, RegStorage temp_reg, RegStorage base_reg, 98 int offset, int check_value, LIR* target, LIR** compare) OVERRIDE; 99 100 // Required for target - register utilities. 101 RegStorage TargetReg(SpecialTargetRegister reg) OVERRIDE; 102 RegStorage TargetReg(SpecialTargetRegister symbolic_reg, WideKind wide_kind) OVERRIDE { 103 if (wide_kind == kWide || wide_kind == kRef) { 104 return As64BitReg(TargetReg(symbolic_reg)); 105 } else { 106 return Check32BitReg(TargetReg(symbolic_reg)); 107 } 108 } 109 RegStorage TargetPtrReg(SpecialTargetRegister symbolic_reg) OVERRIDE { 110 return As64BitReg(TargetReg(symbolic_reg)); 111 } 112 RegStorage GetArgMappingToPhysicalReg(int arg_num); 113 RegLocation GetReturnAlt(); 114 RegLocation GetReturnWideAlt(); 115 RegLocation LocCReturn(); 116 RegLocation LocCReturnRef(); 117 RegLocation LocCReturnDouble(); 118 RegLocation LocCReturnFloat(); 119 RegLocation LocCReturnWide(); 120 ResourceMask GetRegMaskCommon(const RegStorage& reg) const OVERRIDE; 121 void AdjustSpillMask(); 122 void ClobberCallerSave(); 123 void FreeCallTemps(); 124 void LockCallTemps(); 125 void CompilerInitializeRegAlloc(); 126 127 // Required for target - miscellaneous. 
128 void AssembleLIR(); 129 uint32_t LinkFixupInsns(LIR* head_lir, LIR* tail_lir, CodeOffset offset); 130 int AssignInsnOffsets(); 131 void AssignOffsets(); 132 uint8_t* EncodeLIRs(uint8_t* write_pos, LIR* lir); 133 void DumpResourceMask(LIR* lir, const ResourceMask& mask, const char* prefix) OVERRIDE; 134 void SetupTargetResourceMasks(LIR* lir, uint64_t flags, 135 ResourceMask* use_mask, ResourceMask* def_mask) OVERRIDE; 136 const char* GetTargetInstFmt(int opcode); 137 const char* GetTargetInstName(int opcode); 138 std::string BuildInsnString(const char* fmt, LIR* lir, unsigned char* base_addr); 139 ResourceMask GetPCUseDefEncoding() const OVERRIDE; 140 uint64_t GetTargetInstFlags(int opcode); 141 size_t GetInsnSize(LIR* lir) OVERRIDE; 142 bool IsUnconditionalBranch(LIR* lir); 143 144 // Get the register class for load/store of a field. 145 RegisterClass RegClassForFieldLoadStore(OpSize size, bool is_volatile) OVERRIDE; 146 147 // Required for target - Dalvik-level generators. 148 void GenShiftOpLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1, 149 RegLocation lr_shift); 150 void GenArithImmOpLong(Instruction::Code opcode, RegLocation rl_dest, 151 RegLocation rl_src1, RegLocation rl_src2); 152 void GenArrayGet(int opt_flags, OpSize size, RegLocation rl_array, 153 RegLocation rl_index, RegLocation rl_dest, int scale); 154 void GenArrayPut(int opt_flags, OpSize size, RegLocation rl_array, RegLocation rl_index, 155 RegLocation rl_src, int scale, bool card_mark); 156 void GenShiftImmOpLong(Instruction::Code opcode, RegLocation rl_dest, 157 RegLocation rl_src1, RegLocation rl_shift); 158 void GenLongOp(OpKind op, RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2); 159 void GenMulLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1, 160 RegLocation rl_src2); 161 void GenAddLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1, 162 RegLocation rl_src2); 163 void GenAndLong(Instruction::Code opcode, 
RegLocation rl_dest, RegLocation rl_src1, 164 RegLocation rl_src2); 165 void GenArithOpDouble(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1, 166 RegLocation rl_src2); 167 void GenArithOpFloat(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1, 168 RegLocation rl_src2); 169 void GenCmpFP(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1, 170 RegLocation rl_src2); 171 void GenConversion(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src); 172 bool GenInlinedReverseBits(CallInfo* info, OpSize size); 173 bool GenInlinedAbsFloat(CallInfo* info) OVERRIDE; 174 bool GenInlinedAbsDouble(CallInfo* info) OVERRIDE; 175 bool GenInlinedCas(CallInfo* info, bool is_long, bool is_object); 176 bool GenInlinedMinMax(CallInfo* info, bool is_min, bool is_long); 177 bool GenInlinedMinMaxFP(CallInfo* info, bool is_min, bool is_double); 178 bool GenInlinedSqrt(CallInfo* info); 179 bool GenInlinedPeek(CallInfo* info, OpSize size); 180 bool GenInlinedPoke(CallInfo* info, OpSize size); 181 bool GenInlinedAbsLong(CallInfo* info); 182 void GenIntToLong(RegLocation rl_dest, RegLocation rl_src); 183 void GenNotLong(RegLocation rl_dest, RegLocation rl_src); 184 void GenNegLong(RegLocation rl_dest, RegLocation rl_src); 185 void GenOrLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1, 186 RegLocation rl_src2); 187 void GenSubLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1, 188 RegLocation rl_src2); 189 void GenXorLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1, 190 RegLocation rl_src2); 191 void GenDivRemLong(Instruction::Code opcode, RegLocation rl_dest, RegLocation rl_src1, 192 RegLocation rl_src2, bool is_div); 193 RegLocation GenDivRem(RegLocation rl_dest, RegStorage reg_lo, RegStorage reg_hi, bool is_div); 194 RegLocation GenDivRemLit(RegLocation rl_dest, RegStorage reg_lo, int lit, bool is_div); 195 void GenCmpLong(RegLocation rl_dest, RegLocation rl_src1, 
RegLocation rl_src2); 196 void GenDivZeroCheckWide(RegStorage reg); 197 void GenEntrySequence(RegLocation* ArgLocs, RegLocation rl_method); 198 void GenExitSequence(); 199 void GenSpecialExitSequence(); 200 void GenFillArrayData(DexOffset table_offset, RegLocation rl_src); 201 void GenFusedFPCmpBranch(BasicBlock* bb, MIR* mir, bool gt_bias, bool is_double); 202 void GenFusedLongCmpBranch(BasicBlock* bb, MIR* mir); 203 void GenSelect(BasicBlock* bb, MIR* mir) OVERRIDE; 204 void GenSelectConst32(RegStorage left_op, RegStorage right_op, ConditionCode code, 205 int32_t true_val, int32_t false_val, RegStorage rs_dest, 206 int dest_reg_class) OVERRIDE; 207 // Helper used in the above two. 208 void GenSelect(int32_t left, int32_t right, ConditionCode code, RegStorage rs_dest, 209 int result_reg_class); 210 211 bool GenMemBarrier(MemBarrierKind barrier_kind); 212 void GenMonitorEnter(int opt_flags, RegLocation rl_src); 213 void GenMonitorExit(int opt_flags, RegLocation rl_src); 214 void GenMoveException(RegLocation rl_dest); 215 void GenMultiplyByTwoBitMultiplier(RegLocation rl_src, RegLocation rl_result, int lit, 216 int first_bit, int second_bit); 217 void GenNegDouble(RegLocation rl_dest, RegLocation rl_src); 218 void GenNegFloat(RegLocation rl_dest, RegLocation rl_src); 219 void GenPackedSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src); 220 void GenSparseSwitch(MIR* mir, DexOffset table_offset, RegLocation rl_src); 221 222 uint32_t GenPairWise(uint32_t reg_mask, int* reg1, int* reg2); 223 void UnSpillCoreRegs(RegStorage base, int offset, uint32_t reg_mask); 224 void SpillCoreRegs(RegStorage base, int offset, uint32_t reg_mask); 225 void UnSpillFPRegs(RegStorage base, int offset, uint32_t reg_mask); 226 void SpillFPRegs(RegStorage base, int offset, uint32_t reg_mask); 227 228 // Required for target - single operation generators. 
229 LIR* OpUnconditionalBranch(LIR* target); 230 LIR* OpCmpBranch(ConditionCode cond, RegStorage src1, RegStorage src2, LIR* target); 231 LIR* OpCmpImmBranch(ConditionCode cond, RegStorage reg, int check_value, LIR* target); 232 LIR* OpCondBranch(ConditionCode cc, LIR* target); 233 LIR* OpDecAndBranch(ConditionCode c_code, RegStorage reg, LIR* target); 234 LIR* OpFpRegCopy(RegStorage r_dest, RegStorage r_src); 235 LIR* OpIT(ConditionCode cond, const char* guide); 236 void OpEndIT(LIR* it); 237 LIR* OpMem(OpKind op, RegStorage r_base, int disp); 238 LIR* OpPcRelLoad(RegStorage reg, LIR* target); 239 LIR* OpReg(OpKind op, RegStorage r_dest_src); 240 void OpRegCopy(RegStorage r_dest, RegStorage r_src); 241 LIR* OpRegCopyNoInsert(RegStorage r_dest, RegStorage r_src); 242 LIR* OpRegImm64(OpKind op, RegStorage r_dest_src1, int64_t value); 243 LIR* OpRegImm(OpKind op, RegStorage r_dest_src1, int value); 244 LIR* OpRegReg(OpKind op, RegStorage r_dest_src1, RegStorage r_src2); 245 LIR* OpMovRegMem(RegStorage r_dest, RegStorage r_base, int offset, MoveType move_type); 246 LIR* OpMovMemReg(RegStorage r_base, int offset, RegStorage r_src, MoveType move_type); 247 LIR* OpCondRegReg(OpKind op, ConditionCode cc, RegStorage r_dest, RegStorage r_src); 248 LIR* OpRegRegImm64(OpKind op, RegStorage r_dest, RegStorage r_src1, int64_t value); 249 LIR* OpRegRegImm(OpKind op, RegStorage r_dest, RegStorage r_src1, int value); 250 LIR* OpRegRegReg(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2); 251 LIR* OpTestSuspend(LIR* target); 252 LIR* OpVldm(RegStorage r_base, int count); 253 LIR* OpVstm(RegStorage r_base, int count); 254 void OpRegCopyWide(RegStorage dest, RegStorage src); 255 256 LIR* LoadBaseDispBody(RegStorage r_base, int displacement, RegStorage r_dest, OpSize size); 257 LIR* StoreBaseDispBody(RegStorage r_base, int displacement, RegStorage r_src, OpSize size); 258 LIR* OpRegRegRegShift(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2, 259 
int shift); 260 LIR* OpRegRegRegExtend(OpKind op, RegStorage r_dest, RegStorage r_src1, RegStorage r_src2, 261 A64RegExtEncodings ext, uint8_t amount); 262 LIR* OpRegRegShift(OpKind op, RegStorage r_dest_src1, RegStorage r_src2, int shift); 263 LIR* OpRegRegExtend(OpKind op, RegStorage r_dest_src1, RegStorage r_src2, int shift); 264 static const ArmEncodingMap EncodingMap[kA64Last]; 265 int EncodeShift(int code, int amount); 266 int EncodeExtend(int extend_type, int amount); 267 bool IsExtendEncoding(int encoded_value); 268 int EncodeLogicalImmediate(bool is_wide, uint64_t value); 269 uint64_t DecodeLogicalImmediate(bool is_wide, int value); 270 271 ArmConditionCode ArmConditionEncoding(ConditionCode code); 272 bool InexpensiveConstantInt(int32_t value); 273 bool InexpensiveConstantFloat(int32_t value); 274 bool InexpensiveConstantLong(int64_t value); 275 bool InexpensiveConstantDouble(int64_t value); 276 277 void FlushIns(RegLocation* ArgLocs, RegLocation rl_method); 278 279 int GenDalvikArgsNoRange(CallInfo* info, int call_state, LIR** pcrLabel, 280 NextCallInsn next_call_insn, 281 const MethodReference& target_method, 282 uint32_t vtable_idx, 283 uintptr_t direct_code, uintptr_t direct_method, InvokeType type, 284 bool skip_this); 285 286 int GenDalvikArgsRange(CallInfo* info, int call_state, LIR** pcrLabel, 287 NextCallInsn next_call_insn, 288 const MethodReference& target_method, 289 uint32_t vtable_idx, 290 uintptr_t direct_code, uintptr_t direct_method, InvokeType type, 291 bool skip_this); 292 InToRegStorageMapping in_to_reg_storage_mapping_; 293 294 bool WideGPRsAreAliases() OVERRIDE { 295 return true; // 64b architecture. 296 } 297 bool WideFPRsAreAliases() OVERRIDE { 298 return true; // 64b architecture. 299 } 300 size_t GetInstructionOffset(LIR* lir); 301 302 LIR* InvokeTrampoline(OpKind op, RegStorage r_tgt, QuickEntrypointEnum trampoline) OVERRIDE; 303 304 private: 305 /** 306 * @brief Given register xNN (dNN), returns register wNN (sNN). 
307 * @param reg #RegStorage containing a Solo64 input register (e.g. @c x1 or @c d2). 308 * @return A Solo32 with the same register number as the @p reg (e.g. @c w1 or @c s2). 309 * @see As64BitReg 310 */ 311 RegStorage As32BitReg(RegStorage reg) { 312 DCHECK(!reg.IsPair()); 313 if ((kFailOnSizeError || kReportSizeError) && !reg.Is64Bit()) { 314 if (kFailOnSizeError) { 315 LOG(FATAL) << "Expected 64b register"; 316 } else { 317 LOG(WARNING) << "Expected 64b register"; 318 return reg; 319 } 320 } 321 RegStorage ret_val = RegStorage(RegStorage::k32BitSolo, 322 reg.GetRawBits() & RegStorage::kRegTypeMask); 323 DCHECK_EQ(GetRegInfo(reg)->FindMatchingView(RegisterInfo::k32SoloStorageMask) 324 ->GetReg().GetReg(), 325 ret_val.GetReg()); 326 return ret_val; 327 } 328 329 RegStorage Check32BitReg(RegStorage reg) { 330 if ((kFailOnSizeError || kReportSizeError) && !reg.Is32Bit()) { 331 if (kFailOnSizeError) { 332 LOG(FATAL) << "Checked for 32b register"; 333 } else { 334 LOG(WARNING) << "Checked for 32b register"; 335 return As32BitReg(reg); 336 } 337 } 338 return reg; 339 } 340 341 /** 342 * @brief Given register wNN (sNN), returns register xNN (dNN). 343 * @param reg #RegStorage containing a Solo32 input register (e.g. @c w1 or @c s2). 344 * @return A Solo64 with the same register number as the @p reg (e.g. @c x1 or @c d2). 
345 * @see As32BitReg 346 */ 347 RegStorage As64BitReg(RegStorage reg) { 348 DCHECK(!reg.IsPair()); 349 if ((kFailOnSizeError || kReportSizeError) && !reg.Is32Bit()) { 350 if (kFailOnSizeError) { 351 LOG(FATAL) << "Expected 32b register"; 352 } else { 353 LOG(WARNING) << "Expected 32b register"; 354 return reg; 355 } 356 } 357 RegStorage ret_val = RegStorage(RegStorage::k64BitSolo, 358 reg.GetRawBits() & RegStorage::kRegTypeMask); 359 DCHECK_EQ(GetRegInfo(reg)->FindMatchingView(RegisterInfo::k64SoloStorageMask) 360 ->GetReg().GetReg(), 361 ret_val.GetReg()); 362 return ret_val; 363 } 364 365 RegStorage Check64BitReg(RegStorage reg) { 366 if ((kFailOnSizeError || kReportSizeError) && !reg.Is64Bit()) { 367 if (kFailOnSizeError) { 368 LOG(FATAL) << "Checked for 64b register"; 369 } else { 370 LOG(WARNING) << "Checked for 64b register"; 371 return As64BitReg(reg); 372 } 373 } 374 return reg; 375 } 376 377 LIR* LoadFPConstantValue(RegStorage r_dest, int32_t value); 378 LIR* LoadFPConstantValueWide(RegStorage r_dest, int64_t value); 379 void ReplaceFixup(LIR* prev_lir, LIR* orig_lir, LIR* new_lir); 380 void InsertFixupBefore(LIR* prev_lir, LIR* orig_lir, LIR* new_lir); 381 void AssignDataOffsets(); 382 RegLocation GenDivRem(RegLocation rl_dest, RegLocation rl_src1, RegLocation rl_src2, 383 bool is_div, bool check_zero); 384 RegLocation GenDivRemLit(RegLocation rl_dest, RegLocation rl_src1, int lit, bool is_div); 385 size_t GetLoadStoreSize(LIR* lir); 386}; 387 388} // namespace art 389 390#endif // ART_COMPILER_DEX_QUICK_ARM64_CODEGEN_ARM64_H_ 391