instruction_simplifier_arm64.cc revision 7fc6350f6f1ab04b52b9cd7542e0790528296cbe
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instruction_simplifier_arm64.h"

#include "common_arm64.h"
#include "instruction_simplifier_shared.h"
#include "mirror/array-inl.h"

namespace art {
namespace arm64 {

using helpers::CanFitInShifterOperand;
using helpers::HasShifterOperand;
using helpers::ShifterOperandSupportsExtension;

// Tries to split the address computation of an array access out of the access
// itself, by inserting an HArm64IntermediateAddress (array base + data offset)
// in front of `access` and rewiring the access to use it. This lets the memory
// instruction handle only the (register-shifted) index part of the addressing,
// and lets GVN share the base-address computation between accesses.
//
// `access`      - the HArrayGet or HArraySet being simplified.
// `array`       - the array input of `access`.
// `index`       - the index input of `access`.
// `access_size` - element size in bytes, used to pick the correct data offset.
void InstructionSimplifierArm64Visitor::TryExtractArrayAccessAddress(HInstruction* access,
                                                                     HInstruction* array,
                                                                     HInstruction* index,
                                                                     int access_size) {
  if (kEmitCompilerReadBarrier) {
    // The read barrier instrumentation does not support the
    // HArm64IntermediateAddress instruction yet.
    //
    // TODO: Handle this case properly in the ARM64 code generator and
    // re-enable this optimization; otherwise, remove this TODO.
    // b/26601270
    return;
  }
  if (index->IsConstant() ||
      (index->IsBoundsCheck() && index->AsBoundsCheck()->GetIndex()->IsConstant())) {
    // When the index is a constant all the addressing can be fitted in the
    // memory access instruction, so do not split the access.
    return;
  }
  if (access->IsArraySet() &&
      access->AsArraySet()->GetValue()->GetType() == Primitive::kPrimNot) {
    // The access may require a runtime call or the original array pointer,
    // so do not split a reference-typed store.
    return;
  }

  // Proceed to extract the base address computation.
  ArenaAllocator* arena = GetGraph()->GetArena();

  // Offset of the first array element from the array reference, for this
  // element size.
  HIntConstant* offset =
      GetGraph()->GetIntConstant(mirror::Array::DataOffset(access_size).Uint32Value());
  HArm64IntermediateAddress* address =
      new (arena) HArm64IntermediateAddress(array, offset, kNoDexPc);
  // The intermediate address still points inside the array object, so it
  // keeps the array's reference type info.
  address->SetReferenceTypeInfo(array->GetReferenceTypeInfo());
  access->GetBlock()->InsertInstructionBefore(address, access);
  // Input 0 of an HArrayGet/HArraySet is the array; replace it with the
  // precomputed intermediate address.
  access->ReplaceInput(address, 0);
  // Both instructions must depend on GC to prevent any instruction that can
  // trigger GC to be inserted between the two. (A moving GC would otherwise
  // invalidate the raw intermediate address.)
  access->AddSideEffects(SideEffects::DependsOnGC());
  DCHECK(address->GetSideEffects().Includes(SideEffects::DependsOnGC()));
  DCHECK(access->GetSideEffects().Includes(SideEffects::DependsOnGC()));
  // TODO: Code generation for HArrayGet and HArraySet will check whether the input address
  // is an HArm64IntermediateAddress and generate appropriate code.
  // We would like to replace the `HArrayGet` and `HArraySet` with custom instructions (maybe
  // `HArm64Load` and `HArm64Store`). We defer these changes because these new instructions would
  // not bring any advantages yet.
  // Also see the comments in
  // `InstructionCodeGeneratorARM64::VisitArrayGet()` and
  // `InstructionCodeGeneratorARM64::VisitArraySet()`.
  RecordSimplification();
}

// Tries to fold `bitfield_op` (a shift or a sign/zero extension that fits an
// ARM64 shifter operand) into `use` as the shifter operand of a single
// HArm64DataProcWithShifterOp instruction.
//
// When `do_merge` is false this only checks feasibility and performs no graph
// mutation (see the CanMergeIntoShifterOperand/MergeIntoShifterOperand
// wrappers). Returns true if the merge is possible (or was performed).
bool InstructionSimplifierArm64Visitor::TryMergeIntoShifterOperand(HInstruction* use,
                                                                   HInstruction* bitfield_op,
                                                                   bool do_merge) {
  DCHECK(HasShifterOperand(use));
  DCHECK(use->IsBinaryOperation() || use->IsNeg());
  DCHECK(CanFitInShifterOperand(bitfield_op));
  DCHECK(!bitfield_op->HasEnvironmentUses());

  // Shifter operands only exist for the integer (W/X register) data
  // processing instructions.
  Primitive::Type type = use->GetType();
  if (type != Primitive::kPrimInt && type != Primitive::kPrimLong) {
    return false;
  }

  HInstruction* left;
  HInstruction* right;
  if (use->IsBinaryOperation()) {
    left = use->InputAt(0);
    right = use->InputAt(1);
  } else {
    DCHECK(use->IsNeg());
    // Model `neg x` as `0 - x` so the same left/right logic applies below.
    right = use->AsNeg()->InputAt(0);
    left = GetGraph()->GetConstant(right->GetType(), 0);
  }
  DCHECK(left == bitfield_op || right == bitfield_op);

  if (left == right) {
    // TODO: Handle special transformations in this situation?
    // For example should we transform `(x << 1) + (x << 1)` into `(x << 2)`?
    // Or should this be part of a separate transformation logic?
    return false;
  }

  // Only the right-hand operand of an A64 data-processing instruction can be
  // shifted/extended; if `bitfield_op` is on the left we can merge it only by
  // swapping the operands, which requires commutativity.
  bool is_commutative = use->IsBinaryOperation() && use->AsBinaryOperation()->IsCommutative();
  HInstruction* other_input;
  if (bitfield_op == right) {
    other_input = left;
  } else {
    if (is_commutative) {
      other_input = right;
    } else {
      return false;
    }
  }

  HArm64DataProcWithShifterOp::OpKind op_kind;
  int shift_amount = 0;
  HArm64DataProcWithShifterOp::GetOpInfoFromInstruction(bitfield_op, &op_kind, &shift_amount);

  // Extension operands (SXTB/UXTH/...) are only available on the add/sub
  // family of instructions; bail out if `use` does not support them.
  if (HArm64DataProcWithShifterOp::IsExtensionOp(op_kind) &&
      !ShifterOperandSupportsExtension(use)) {
    return false;
  }

  if (do_merge) {
    HArm64DataProcWithShifterOp* alu_with_op =
        new (GetGraph()->GetArena()) HArm64DataProcWithShifterOp(use,
                                                                 other_input,
                                                                 bitfield_op->InputAt(0),
                                                                 op_kind,
                                                                 shift_amount,
                                                                 use->GetDexPc());
    use->GetBlock()->ReplaceAndRemoveInstructionWith(use, alu_with_op);
    // Remove the bitfield op once its last user has been rewritten to consume
    // its input directly.
    if (bitfield_op->GetUses().IsEmpty()) {
      bitfield_op->GetBlock()->RemoveInstruction(bitfield_op);
    }
    RecordSimplification();
  }

  return true;
}

// Merge a bitfield move instruction into its uses if it can be merged in all of them.
bool InstructionSimplifierArm64Visitor::TryMergeIntoUsersShifterOperand(HInstruction* bitfield_op) {
  DCHECK(CanFitInShifterOperand(bitfield_op));

  // An instruction referenced from environments (deopt/debug state) must stay
  // materialized, so it cannot be folded away.
  if (bitfield_op->HasEnvironmentUses()) {
    return false;
  }

  const HUseList<HInstruction*>& uses = bitfield_op->GetUses();

  // Check whether we can merge the instruction in all its users' shifter operand.
  // This first pass is side-effect free; the graph is only mutated once every
  // user is known to accept the merge.
  for (HUseIterator<HInstruction*> it_use(uses); !it_use.Done(); it_use.Advance()) {
    HInstruction* use = it_use.Current()->GetUser();
    if (!HasShifterOperand(use)) {
      return false;
    }
    if (!CanMergeIntoShifterOperand(use, bitfield_op)) {
      return false;
    }
  }

  // Merge the instruction into its uses. Note that merging replaces each user
  // (and drops its use of `bitfield_op`) while we iterate the use list.
  for (HUseIterator<HInstruction*> it_use(uses); !it_use.Done(); it_use.Advance()) {
    HInstruction* use = it_use.Current()->GetUser();
    bool merged = MergeIntoShifterOperand(use, bitfield_op);
    DCHECK(merged);
  }

  return true;
}

// `and x, (not y)` can be lowered to a single BIC instruction; the shared
// simplifier performs the merge.
void InstructionSimplifierArm64Visitor::VisitAnd(HAnd* instruction) {
  if (TryMergeNegatedInput(instruction)) {
    RecordSimplification();
  }
}

void InstructionSimplifierArm64Visitor::VisitArrayGet(HArrayGet* instruction) {
  TryExtractArrayAccessAddress(instruction,
                               instruction->GetArray(),
                               instruction->GetIndex(),
                               Primitive::ComponentSize(instruction->GetType()));
}

void InstructionSimplifierArm64Visitor::VisitArraySet(HArraySet* instruction) {
  TryExtractArrayAccessAddress(instruction,
                               instruction->GetArray(),
                               instruction->GetIndex(),
                               Primitive::ComponentSize(instruction->GetComponentType()));
}

// Try to fuse a multiply with a surrounding add/sub into MADD/MSUB (or a
// negation into MNEG); handled by the simplifier logic shared across backends.
void InstructionSimplifierArm64Visitor::VisitMul(HMul* instruction) {
  if (TryCombineMultiplyAccumulate(instruction, kArm64)) {
    RecordSimplification();
  }
}

// `or x, (not y)` can be lowered to a single ORN instruction.
void InstructionSimplifierArm64Visitor::VisitOr(HOr* instruction) {
  if (TryMergeNegatedInput(instruction)) {
    RecordSimplification();
  }
}

// Shifts by a constant amount are candidates for merging into their users'
// shifter operands (variable shift amounts cannot be encoded there).
void InstructionSimplifierArm64Visitor::VisitShl(HShl* instruction) {
  if (instruction->InputAt(1)->IsConstant()) {
    TryMergeIntoUsersShifterOperand(instruction);
  }
}

void InstructionSimplifierArm64Visitor::VisitShr(HShr* instruction) {
  if (instruction->InputAt(1)->IsConstant()) {
    TryMergeIntoUsersShifterOperand(instruction);
  }
}

// Integral-to-integral conversions map to sign/zero extensions, which can be
// encoded as extended-register operands of the users.
void InstructionSimplifierArm64Visitor::VisitTypeConversion(HTypeConversion* instruction) {
  Primitive::Type result_type = instruction->GetResultType();
  Primitive::Type input_type = instruction->GetInputType();

  if (input_type == result_type) {
    // We let the arch-independent code handle this.
    return;
  }

  if (Primitive::IsIntegralType(result_type) && Primitive::IsIntegralType(input_type)) {
    TryMergeIntoUsersShifterOperand(instruction);
  }
}

void InstructionSimplifierArm64Visitor::VisitUShr(HUShr* instruction) {
  if (instruction->InputAt(1)->IsConstant()) {
    TryMergeIntoUsersShifterOperand(instruction);
  }
}

// `xor x, (not y)` can be lowered to a single EON instruction.
void InstructionSimplifierArm64Visitor::VisitXor(HXor* instruction) {
  if (TryMergeNegatedInput(instruction)) {
    RecordSimplification();
  }
}

}  // namespace arm64
}  // namespace art