code_generator_arm.cc revision a8eef82f394f31272610d7ed80328ee465fa1a0f
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm.h"

#include "arch/arm/instruction_set_features_arm.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

namespace art {

namespace arm {

// Returns the D register overlapping the S register pair starting at `reg`.
// `reg` must be the even (low) half: on ARM VFP, D<n> aliases S<2n>/S<2n+1>.
static DRegister FromLowSToD(SRegister reg) {
  DCHECK_EQ(reg % 2, 0);
  return static_cast<DRegister>(reg / 2);
}

// When true, GenerateFrameEntry emits an explicit compare against the stack
// end plus a branch to a slow path; when false it emits an implicit probe
// load below SP instead.
static constexpr bool kExplicitStackOverflowCheck = false;

// Number of registers pushed in the frame entry sequence (see PushList below).
static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
// Stack offset at which the current method is stored on frame entry.
static constexpr int kCurrentMethodStackOffset = 0;

// Registers used to pass arguments when calling into the runtime.
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);

// Calling convention used for calls into quick runtime entrypoints.
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

// Inside the slow path classes below, `codegen` is the EmitNativeCode
// parameter; this macro routes assembler calls through it.
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()

// Base class for ARM slow paths: carries the entry/exit labels the fast path
// branches through.
class SlowPathCodeARM : public SlowPathCode {
 public:
  SlowPathCodeARM() : entry_label_(), exit_label_() {}

  Label* GetEntryLabel() { return &entry_label_; }
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
};

// Slow path that calls the runtime to throw a NullPointerException.
// Does not return to the fast path (the runtime call throws).
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};

// Slow path that calls the runtime to throw an ArithmeticException on
// division by zero. Does not return to the fast path.
class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};

// Slow path taken by the explicit stack overflow check (only emitted when
// kExplicitStackOverflowCheck is true); tail-calls the runtime throw helper.
class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
 public:
  StackOverflowCheckSlowPathARM() {}

  void
  EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    // Loading the entrypoint address directly into PC transfers control to the
    // runtime throw helper without going through LR.
    __ LoadFromOffset(kLoadWord, PC, TR,
        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
};

// Slow path calling the runtime suspend-check entrypoint, then resuming either
// at the return label (null successor) or at the successor block.
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // Live registers must survive the runtime call.
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};

// Slow path that throws ArrayIndexOutOfBoundsException, passing the offending
// index and the array length to the runtime. Does not return.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};

// Slow path resolving (and optionally initializing) a class via the runtime,
// then moving the result into the instruction's output location.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Runtime arguments: type index in arg 0, current method in arg 1.
    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    // Pick the entrypoint depending on whether the class must also be
    // initialized (clinit) or only resolved.
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};

// Slow path resolving a string via the runtime and moving the result (R0)
// into the instruction's output location.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // Runtime arguments: string index in arg 0, current method in arg 1.
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};

// Slow path shared by HInstanceOf (calls the instanceof runtime helper and
// materializes its result) and HCheckCast (calls the check-cast helper, which
// throws on failure).
class TypeCheckSlowPathARM : public SlowPathCodeARM {
 public:
  TypeCheckSlowPathARM(HInstruction* instruction,
                       Location class_to_check,
                       Location object_class,
                       uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        object_class_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
    }

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
};

#undef __

// NOTE(review): the second #undef below is redundant (the macro was just
// undefined above) but harmless. From here on, `__` dispatches through the
// code generator's own assembler (member GetAssembler()).
#undef __
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->

// Maps an HIR condition to the ARM condition code used when branching on it.
inline Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return EQ;
    case kCondNE: return NE;
    case kCondLT: return LT;
    case kCondLE: return LE;
    case kCondGT: return GT;
    case kCondGE: return GE;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;  // Unreachable.
}

// Maps an HIR condition to the ARM condition code of its logical negation.
inline Condition ARMOppositeCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return NE;
    case kCondNE: return EQ;
    case kCondLT: return GE;
    case kCondLE: return GT;
    case kCondGT: return LE;
    case kCondGE: return LT;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;  // Unreachable.
}

// Pretty-prints a core register for debug/dump output.
void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
}

// Pretty-prints a floating-point (S) register for debug/dump output.
void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
}

// Spills core register `reg_id` to [SP, stack_index]; returns bytes used.
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}

// Reloads core register `reg_id` from [SP, stack_index]; returns bytes used.
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}

// Spills S register `reg_id` to [SP, stack_index]; returns bytes used.
size_t CodeGeneratorARM::SaveFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreSToOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}

// Reloads S register `reg_id` from [SP, stack_index]; returns bytes used.
size_t CodeGeneratorARM::RestoreFloatingPointRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadSFromOffset(static_cast<SRegister>(reg_id), SP, stack_index);
  return kArmWordSize;
}

CodeGeneratorARM::CodeGeneratorARM(HGraph* graph,
                                   const ArmInstructionSetFeatures* isa_features)
    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true),
      isa_features_(isa_features) {}

// Size in bytes of the registers pushed by GenerateFrameEntry.
size_t CodeGeneratorARM::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
}

// Picks a free register (or register pair) suitable for `type` and marks it,
// plus any overlapping pair entries, as blocked.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      // Block both halves individually, then refresh the pair table so other
      // pairs sharing these cores are blocked too.
      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // Doubles need an even-aligned S-register pair (S2n/S2n+1 form D<n>).
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}

// Marks registers the allocator must never hand out (special-purpose cores,
// callee-saves this backend does not yet use, and the upper S registers).
void CodeGeneratorARM::SetupBlockedRegisters() const {
  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs_[R1_R2] = true;

  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  // We always save and restore R6 and R7 to make sure we can use three
  // register pairs for long operations.
  blocked_core_registers_[R4] = true;
  blocked_core_registers_[R5] = true;
  blocked_core_registers_[R8] = true;
  blocked_core_registers_[R10] = true;
  blocked_core_registers_[R11] = true;

  blocked_fpu_registers_[S16] = true;
  blocked_fpu_registers_[S17] = true;
  blocked_fpu_registers_[S18] = true;
  blocked_fpu_registers_[S19] = true;
  blocked_fpu_registers_[S20] = true;
  blocked_fpu_registers_[S21] = true;
  blocked_fpu_registers_[S22] = true;
  blocked_fpu_registers_[S23] = true;
  blocked_fpu_registers_[S24] = true;
  blocked_fpu_registers_[S25] = true;
  blocked_fpu_registers_[S26] = true;
  blocked_fpu_registers_[S27] = true;
  blocked_fpu_registers_[S28] = true;
  blocked_fpu_registers_[S29] = true;
  blocked_fpu_registers_[S30] = true;
  blocked_fpu_registers_[S31] = true;

  UpdateBlockedPairRegisters();
}

// Re-derives blocked_register_pairs_: a pair is blocked whenever either of
// its core halves is blocked.
void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
    ArmManagedRegister current =
        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
    if (blocked_core_registers_[current.AsRegisterPairLow()]
        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
      blocked_register_pairs_[i] = true;
    }
  }
}

InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

void
// Emits the method prologue: optional stack overflow check, push of
// LR/R6/R7, frame allocation, and store of the current method at SP + 0.
CodeGeneratorARM::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  if (!skip_overflow_check) {
    if (kExplicitStackOverflowCheck) {
      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
      AddSlowPath(slow_path);

      // Compare SP against the thread's stack end and branch to the throwing
      // slow path when below it.
      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
      __ cmp(SP, ShifterOperand(IP));
      __ b(slow_path->GetEntryLabel(), CC);
    } else {
      // Implicit check: probe-load below SP by the reserved amount; if the
      // stack cannot satisfy it, the load faults at this recorded PC.
      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
      __ LoadFromOffset(kLoadWord, IP, IP, 0);
      RecordPcInfo(nullptr, 0);
    }
  }

  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
  __ PushList(1 << LR | 1 << R6 | 1 << R7);

  // The return PC has already been pushed on the stack.
  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
  // R0 holds the current method on entry; store it at the bottom of the frame.
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}

// Emits the method epilogue: frees the frame and pops R6/R7 plus the saved
// LR directly into PC (combined restore + return).
void CodeGeneratorARM::GenerateFrameExit() {
  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
  __ PopList(1 << PC | 1 << R6 | 1 << R7);
}

void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

// Returns the stack location backing a dex local of the given type
// (double-width slot for long/double, single slot otherwise).
Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}

// Assigns the next argument of the given type to a register / register pair /
// stack slot following the managed (dex) calling convention, advancing the
// visitor's gp/fpu/stack cursors.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        if (calling_convention.GetRegisterAt(index) == R1) {
          // Skip R1, and use R2_R3 instead.
          gp_index_++;
          index++;
        }
      }
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        DCHECK_EQ(calling_convention.GetRegisterAt(index) + 1,
                  calling_convention.GetRegisterAt(index + 1));
        return Location::RegisterPairLocation(calling_convention.GetRegisterAt(index),
                                              calling_convention.GetRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // Keep the float cursor past any registers already consumed by doubles.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles start at the next even S register, never before the floats
      // already handed out.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        DCHECK_EQ(calling_convention.GetFpuRegisterAt(index) + 1,
                  calling_convention.GetFpuRegisterAt(index + 1));
        DCHECK_EQ(calling_convention.GetFpuRegisterAt(index) & 1, 0);
        return Location::FpuRegisterPairLocation(
            calling_convention.GetFpuRegisterAt(index),
            calling_convention.GetFpuRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}

// Returns where a value of the given type comes back from a call:
// R0 / R0+R1 for integral results, S0 / S0+S1 for FP results.
Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      return Location::RegisterLocation(R0);
    }

    case Primitive::kPrimFloat: {
      return Location::FpuRegisterLocation(S0);
    }

    case Primitive::kPrimLong: {
      return Location::RegisterPairLocation(R0, R1);
    }

    case Primitive::kPrimDouble: {
      return Location::FpuRegisterPairLocation(S0, S1);
    }

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
  return Location();
}

// Moves a 32-bit value between any combination of core register, S register
// and stack slot; no-op if source and destination are equal.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack move goes through the IP scratch register.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}

// Moves a 64-bit value between register pairs, FP register pairs and
// double-width stack slots; no-op if source and destination are equal.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      // The halves may overlap, so let the parallel move resolver order them.
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      // No conflict possible, so just do the moves.
      DCHECK(source.IsDoubleStackSlot());
      if (destination.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 is not a contiguous pair for a word-pair load, so load the
        // halves one at a time.
        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
      } else {
        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                          SP, source.GetStackIndex());
      }
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      // Stack-to-stack: move both words via the parallel move resolver.
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}

// Materializes `instruction`'s value into `location`. Handles constants,
// loads of dex locals, temporaries, and the plain output of the previous
// instruction.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant()) {
      int32_t value = const_to_move->AsIntConstant()->GetValue();
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else {
      DCHECK(const_to_move->IsLongConstant()) << const_to_move;
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location,
temp_location); 826 } else { 827 DCHECK(temp_location.IsDoubleStackSlot()); 828 Move64(location, temp_location); 829 } 830 } else { 831 DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary()); 832 switch (instruction->GetType()) { 833 case Primitive::kPrimBoolean: 834 case Primitive::kPrimByte: 835 case Primitive::kPrimChar: 836 case Primitive::kPrimShort: 837 case Primitive::kPrimNot: 838 case Primitive::kPrimInt: 839 case Primitive::kPrimFloat: 840 Move32(location, locations->Out()); 841 break; 842 843 case Primitive::kPrimLong: 844 case Primitive::kPrimDouble: 845 Move64(location, locations->Out()); 846 break; 847 848 default: 849 LOG(FATAL) << "Unexpected type " << instruction->GetType(); 850 } 851 } 852} 853 854void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset, 855 HInstruction* instruction, 856 uint32_t dex_pc) { 857 __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset); 858 __ blx(LR); 859 RecordPcInfo(instruction, dex_pc); 860 DCHECK(instruction->IsSuspendCheck() 861 || instruction->IsBoundsCheck() 862 || instruction->IsNullCheck() 863 || instruction->IsDivZeroCheck() 864 || instruction->GetLocations()->CanCall() 865 || !IsLeafMethod()); 866} 867 868void LocationsBuilderARM::VisitGoto(HGoto* got) { 869 got->SetLocations(nullptr); 870} 871 872void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) { 873 HBasicBlock* successor = got->GetSuccessor(); 874 DCHECK(!successor->IsExitBlock()); 875 876 HBasicBlock* block = got->GetBlock(); 877 HInstruction* previous = got->GetPrevious(); 878 879 HLoopInformation* info = block->GetLoopInformation(); 880 if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) { 881 codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck()); 882 GenerateSuspendCheck(info->GetSuspendCheck(), successor); 883 return; 884 } 885 886 if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) { 887 
GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr); 888 } 889 if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) { 890 __ b(codegen_->GetLabelOf(successor)); 891 } 892} 893 894void LocationsBuilderARM::VisitExit(HExit* exit) { 895 exit->SetLocations(nullptr); 896} 897 898void InstructionCodeGeneratorARM::VisitExit(HExit* exit) { 899 UNUSED(exit); 900 if (kIsDebugBuild) { 901 __ Comment("Unreachable"); 902 __ bkpt(0); 903 } 904} 905 906void LocationsBuilderARM::VisitIf(HIf* if_instr) { 907 LocationSummary* locations = 908 new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall); 909 HInstruction* cond = if_instr->InputAt(0); 910 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) { 911 locations->SetInAt(0, Location::RequiresRegister()); 912 } 913} 914 915void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) { 916 HInstruction* cond = if_instr->InputAt(0); 917 if (cond->IsIntConstant()) { 918 // Constant condition, statically compared against 1. 919 int32_t cond_value = cond->AsIntConstant()->GetValue(); 920 if (cond_value == 1) { 921 if (!codegen_->GoesToNextBlock(if_instr->GetBlock(), 922 if_instr->IfTrueSuccessor())) { 923 __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor())); 924 } 925 return; 926 } else { 927 DCHECK_EQ(cond_value, 0); 928 } 929 } else { 930 if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) { 931 // Condition has been materialized, compare the output to 0 932 DCHECK(if_instr->GetLocations()->InAt(0).IsRegister()); 933 __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(), 934 ShifterOperand(0)); 935 __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE); 936 } else { 937 // Condition has not been materialized, use its inputs as the 938 // comparison and its condition as the branch condition. 
LocationSummary* locations = cond->GetLocations();
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        // Use the constant directly when it is encodable as a CMP operand;
        // otherwise materialize it in the scratch register IP first.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  // Fall through to the false successor when possible; branch otherwise.
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}


void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  // An output register is only needed when the boolean result is actually
  // materialized (i.e. not folded into a branch).
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value,
&operand)) {
      __ cmp(left, operand);
    } else {
      // Constant not encodable as a CMP operand: load it into IP first.
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // Materialize the boolean result with an IT(E) block: move 1 on the
  // condition, 0 on its opposite.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}

// All concrete comparison nodes share the generic HCondition handling above.

void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  // Locals are only declared in the entry block; no code is emitted.
  DCHECK_EQ(local->GetBlock(),
GetGraph()->GetEntryBlock()); 1057} 1058 1059void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) { 1060 load->SetLocations(nullptr); 1061} 1062 1063void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) { 1064 // Nothing to do, this is driven by the code generator. 1065 UNUSED(load); 1066} 1067 1068void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) { 1069 LocationSummary* locations = 1070 new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall); 1071 switch (store->InputAt(1)->GetType()) { 1072 case Primitive::kPrimBoolean: 1073 case Primitive::kPrimByte: 1074 case Primitive::kPrimChar: 1075 case Primitive::kPrimShort: 1076 case Primitive::kPrimInt: 1077 case Primitive::kPrimNot: 1078 case Primitive::kPrimFloat: 1079 locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal()))); 1080 break; 1081 1082 case Primitive::kPrimLong: 1083 case Primitive::kPrimDouble: 1084 locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal()))); 1085 break; 1086 1087 default: 1088 LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType(); 1089 } 1090} 1091 1092void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) { 1093 UNUSED(store); 1094} 1095 1096void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) { 1097 LocationSummary* locations = 1098 new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall); 1099 locations->SetOut(Location::ConstantLocation(constant)); 1100} 1101 1102void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) { 1103 // Will be generated at use site. 
UNUSED(constant);
}

void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
UNUSED(constant);
}

void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  // Pin the returned value to the calling convention's return location.
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  HandleInvoke(invoke);
}

// Loads the ArtMethod* of the current method, which is stored at the bottom
// of the frame (stack offset 0).
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}

void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ LoadFromOffset(
      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
  // temp = temp[index_in_cache]
  __ LoadFromOffset(
      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetDexMethodIndex()));
  // LR = temp[offset_of_quick_compiled_code]
  __ LoadFromOffset(kLoadWord, LR, temp,
                    mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                        kArmWordSize).Int32Value());
  // LR()
  __ blx(LR);

  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

// Shared location setup for all invoke flavors: arguments and return value
// follow the dex calling convention, R0 is reserved as a temp for the callee
// method pointer.
void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
  locations->AddTemp(Location::RegisterLocation(R0));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
}

void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  // Offset of the target method inside the receiver class's embedded vtable.
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if
(receiver.IsStackSlot()) {
    // Receiver spilled: reload it from the stack before reading its class.
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument (the interface method index, passed in R12).
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}

void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  // IMT slot: the method index is hashed modulo the fixed IMT size.
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
__ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      // Long negation reads the input pair after writing the low output word,
      // so output and input must not share registers.
      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
DCHECK(in.IsRegister());
      // out = 0 - in
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set. We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);

  // The float-to-long and double-to-long type conversions rely on a
  // call to the runtime.
  LocationSummary::CallKind call_kind =
      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
       && result_type == Primitive::kPrimLong)
      ?
LocationSummary::kCall
      : LocationSummary::kNoCall;
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          // The FPU temp holds the truncated value before the core move.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-int' instruction.
locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-long' instruction.
          // Handled by a runtime call: input and output follow the runtime
          // calling convention (result in R0/R1).
          InvokeRuntimeCallingConvention calling_convention;
          locations->SetInAt(0, Location::FpuRegisterLocation(
              calling_convention.GetFpuRegisterAt(0)));
          locations->SetOut(Location::RegisterPairLocation(R0, R1));
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-long' instruction.
          // Also a runtime call; the double argument occupies two S registers.
          InvokeRuntimeCallingConvention calling_convention;
          locations->SetInAt(0, Location::FpuRegisterPairLocation(
              calling_convention.GetFpuRegisterAt(0),
              calling_convention.GetFpuRegisterAt(1)));
          locations->SetOut(Location::RegisterPairLocation(R0, R1));
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          // Two core temps hold the 2^32 double constant halves; two FPU
          // temps hold double-precision intermediates (see the code
          // generator for the precision rationale).
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}

void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          // SBFX sign-extends the low 8 bits into the full register.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
__ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          // Truncation: keep only the low 32-bit word of the long.
          DCHECK(out.IsRegister());
          if (in.IsRegisterPair()) {
            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          __ vmovs(temp, in.AsFpuRegister<SRegister>());
          __ vcvtis(temp, temp);
          __ vmovrs(out.AsRegister<Register>(), temp);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);
          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          __ vcvtid(temp_s, temp_d);
          __ vmovrs(out.AsRegister<Register>(), temp_s);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
          // Sign extension.
          __ Asr(out.AsRegisterPairHigh<Register>(),
                 out.AsRegisterPairLow<Register>(),
                 31);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
                                  conversion,
                                  conversion->GetDexPc());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
                                  conversion,
                                  conversion->GetDexPc());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          // UBFX zero-extends the low 16 bits (char is unsigned).
          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-float' instruction.
          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-float' instruction.
Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister output = out.AsFpuRegister<SRegister>();
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp1_d = FromLowSToD(temp1_s);
          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
          DRegister temp2_d = FromLowSToD(temp2_s);

          // Operations use doubles for precision reasons (each 32-bit
          // half of a long fits in the 53-bit mantissa of a double,
          // but not in the 24-bit mantissa of a float). This is
          // especially important for the low bits. The result is
          // eventually converted to float.

          // temp1_d = int-to-double(high)
          __ vmovsr(temp1_s, high);
          __ vcvtdi(temp1_d, temp1_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp2_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand. Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into
          // `temp2_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp2_d, constant_low, constant_high);
          // temp1_d = temp1_d * 2^32
          __ vmuld(temp1_d, temp1_d, temp2_d);
          // temp2_d = unsigned-to-double(low)
          __ vmovsr(temp2_s, low);
          __ vcvtdu(temp2_d, temp2_s);
          // temp1_d = temp1_d + temp2_d
          __ vaddd(temp1_d, temp1_d, temp2_d);
          // output = double-to-float(temp1_d);
          __ vcvtsd(output, temp1_d);
          break;
        }

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
__ vcvtsd(out.AsFpuRegister<SRegister>(),
                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-double' instruction.
          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    out.AsFpuRegisterPairLow<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-double' instruction.
          // Same 2^32-scaling scheme as long-to-float above, but the result
          // stays in double precision.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
          DRegister out_d = FromLowSToD(out_s);
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);

          // out_d = int-to-double(high)
          __ vmovsr(out_s, high);
          __ vcvtdi(out_d, out_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand. Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
__ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp_d, constant_low, constant_high);
          // out_d = out_d * 2^32
          __ vmuld(out_d, out_d, temp_d);
          // temp_d = unsigned-to-double(low)
          __ vmovsr(temp_s, low);
          __ vcvtdu(temp_d, temp_s);
          // out_d = out_d + temp_d
          __ vaddd(out_d, out_d, temp_d);
          break;
        }

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    in.AsFpuRegister<SRegister>());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}

void LocationsBuilderARM::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      // Long addition writes the low word before reading the high input
      // words, so the output must not alias the inputs.
      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}

void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
  LocationSummary*
      locations = add->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
      if (second.IsRegister()) {
        __ add(out.AsRegister<Register>(),
               first.AsRegister<Register>(),
               ShifterOperand(second.AsRegister<Register>()));
      } else {
        // Second input is a constant; fold it through AddConstant.
        __ AddConstant(out.AsRegister<Register>(),
                       first.AsRegister<Register>(),
                       second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;

    case Primitive::kPrimLong:
      // 64-bit add: `adds` sets the carry flag, `adc` folds it into the high word.
      __ adds(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ adc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      __ vadds(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}

// Builds the register constraints for an HSub (mirrors VisitAdd above).
void LocationsBuilderARM::VisitSub(HSub* sub) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}

// Emits the code for an HSub.
void InstructionCodeGeneratorARM::VisitSub(HSub* sub) {
  LocationSummary* locations = sub->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (sub->GetResultType()) {
    case Primitive::kPrimInt: {
      if (second.IsRegister()) {
        __ sub(out.AsRegister<Register>(),
               first.AsRegister<Register>(),
               ShifterOperand(second.AsRegister<Register>()));
      } else {
        // Subtracting a constant is adding its negation.
        __ AddConstant(out.AsRegister<Register>(),
                       first.AsRegister<Register>(),
                       -second.GetConstant()->AsIntConstant()->GetValue());
      }
      break;
    }

    case Primitive::kPrimLong: {
      // 64-bit sub: `subs` sets the borrow, `sbc` folds it into the high word.
      __ subs(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ sbc(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vsubs(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }


    default:
      LOG(FATAL) << "Unexpected sub type " << sub->GetResultType();
  }
}

// Builds the register constraints for an HMul.
void LocationsBuilderARM::VisitMul(HMul* mul) {
    LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

// Emits the code for an HMul. The 64-bit case synthesizes the product from
// three 32-bit multiplies using IP as scratch (see the formula in the body).
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

// Builds the register constraints for an HDiv. Long division goes through a
// runtime call, so its inputs/outputs follow the runtime calling convention.
void LocationsBuilderARM::VisitDiv(HDiv* div) {
  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
      ?
LocationSummary::kCall 2041 : LocationSummary::kNoCall; 2042 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind); 2043 2044 switch (div->GetResultType()) { 2045 case Primitive::kPrimInt: { 2046 locations->SetInAt(0, Location::RequiresRegister()); 2047 locations->SetInAt(1, Location::RequiresRegister()); 2048 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 2049 break; 2050 } 2051 case Primitive::kPrimLong: { 2052 InvokeRuntimeCallingConvention calling_convention; 2053 locations->SetInAt(0, Location::RegisterPairLocation( 2054 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1))); 2055 locations->SetInAt(1, Location::RegisterPairLocation( 2056 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3))); 2057 // The runtime helper puts the output in R0,R2. 2058 locations->SetOut(Location::RegisterPairLocation(R0, R2)); 2059 break; 2060 } 2061 case Primitive::kPrimFloat: 2062 case Primitive::kPrimDouble: { 2063 locations->SetInAt(0, Location::RequiresFpuRegister()); 2064 locations->SetInAt(1, Location::RequiresFpuRegister()); 2065 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); 2066 break; 2067 } 2068 2069 default: 2070 LOG(FATAL) << "Unexpected div type " << div->GetResultType(); 2071 } 2072} 2073 2074void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) { 2075 LocationSummary* locations = div->GetLocations(); 2076 Location out = locations->Out(); 2077 Location first = locations->InAt(0); 2078 Location second = locations->InAt(1); 2079 2080 switch (div->GetResultType()) { 2081 case Primitive::kPrimInt: { 2082 __ sdiv(out.AsRegister<Register>(), 2083 first.AsRegister<Register>(), 2084 second.AsRegister<Register>()); 2085 break; 2086 } 2087 2088 case Primitive::kPrimLong: { 2089 InvokeRuntimeCallingConvention calling_convention; 2090 DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>()); 2091 
DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>()); 2092 DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>()); 2093 DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>()); 2094 DCHECK_EQ(R0, out.AsRegisterPairLow<Register>()); 2095 DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>()); 2096 2097 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc()); 2098 break; 2099 } 2100 2101 case Primitive::kPrimFloat: { 2102 __ vdivs(out.AsFpuRegister<SRegister>(), 2103 first.AsFpuRegister<SRegister>(), 2104 second.AsFpuRegister<SRegister>()); 2105 break; 2106 } 2107 2108 case Primitive::kPrimDouble: { 2109 __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), 2110 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()), 2111 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>())); 2112 break; 2113 } 2114 2115 default: 2116 LOG(FATAL) << "Unexpected div type " << div->GetResultType(); 2117 } 2118} 2119 2120void LocationsBuilderARM::VisitRem(HRem* rem) { 2121 Primitive::Type type = rem->GetResultType(); 2122 LocationSummary::CallKind call_kind = type == Primitive::kPrimInt 2123 ? 
LocationSummary::kNoCall 2124 : LocationSummary::kCall; 2125 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind); 2126 2127 switch (type) { 2128 case Primitive::kPrimInt: { 2129 locations->SetInAt(0, Location::RequiresRegister()); 2130 locations->SetInAt(1, Location::RequiresRegister()); 2131 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 2132 locations->AddTemp(Location::RequiresRegister()); 2133 break; 2134 } 2135 case Primitive::kPrimLong: { 2136 InvokeRuntimeCallingConvention calling_convention; 2137 locations->SetInAt(0, Location::RegisterPairLocation( 2138 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1))); 2139 locations->SetInAt(1, Location::RegisterPairLocation( 2140 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3))); 2141 // The runtime helper puts the output in R2,R3. 2142 locations->SetOut(Location::RegisterPairLocation(R2, R3)); 2143 break; 2144 } 2145 case Primitive::kPrimFloat: { 2146 InvokeRuntimeCallingConvention calling_convention; 2147 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0))); 2148 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1))); 2149 locations->SetOut(Location::FpuRegisterLocation(S0)); 2150 break; 2151 } 2152 2153 case Primitive::kPrimDouble: { 2154 InvokeRuntimeCallingConvention calling_convention; 2155 locations->SetInAt(0, Location::FpuRegisterPairLocation( 2156 calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1))); 2157 locations->SetInAt(1, Location::FpuRegisterPairLocation( 2158 calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3))); 2159 locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1)); 2160 break; 2161 } 2162 2163 default: 2164 LOG(FATAL) << "Unexpected rem type " << type; 2165 } 2166} 2167 2168void InstructionCodeGeneratorARM::VisitRem(HRem* rem) { 2169 
    LocationSummary* locations = rem->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = rem->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register reg1 = first.AsRegister<Register>();
      Register reg2 = second.AsRegister<Register>();
      Register temp = locations->GetTemp(0).AsRegister<Register>();

      // temp = reg1 / reg2  (integer division)
      // temp = temp * reg2
      // dest = reg1 - temp
      __ sdiv(temp, reg1, reg2);
      __ mul(temp, temp, reg2);
      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
      break;
    }

    case Primitive::kPrimLong: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
      break;
    }

    case Primitive::kPrimFloat: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc());
      break;
    }

    case Primitive::kPrimDouble: {
      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc());
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << type;
  }
}

void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

// Emits a zero check on the divisor, branching to a slow path (which raises
// the runtime error) when the divisor is zero.
void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case Primitive::kPrimInt: {
      if (value.IsRegister()) {
        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
        __ b(slow_path->GetEntryLabel(), EQ);
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsIntConstant()->GetValue() == 0) {
          // Divisor is the constant zero: branch to the slow path unconditionally.
          __ b(slow_path->GetEntryLabel());
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      if (value.IsRegisterPair()) {
        // OR both halves together; Z is set iff the whole long is zero.
        __ orrs(IP,
                value.AsRegisterPairLow<Register>(),
                ShifterOperand(value.AsRegisterPairHigh<Register>()));
        __ b(slow_path->GetEntryLabel(), EQ);
      } else {
        DCHECK(value.IsConstant()) << value;
        if (value.GetConstant()->AsLongConstant()->GetValue() == 0) {
          __ b(slow_path->GetEntryLabel());
        }
      }
      break;
      // NOTE(review): this `default:` label sits inside the kPrimLong case's
      // braces. That is legal C++ (a label may appear in a nested block of the
      // switch) and behaves correctly, but hoisting it to switch level would
      // be clearer.
      default:
        LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType();
    }
  }
}

// Builds constraints for the shift operations (shl/shr/ushr). Long shifts are
// performed by a runtime call.
void LocationsBuilderARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong
      ? LocationSummary::kCall
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind);

  switch (op->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1)));
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    case Primitive::kPrimLong: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
      locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
      // The runtime helper puts the output in R0,R2.
      locations->SetOut(Location::RegisterPairLocation(R0, R2));
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << op->GetResultType();
  }
}

// Emits the code for the shift operations.
void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = op->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register out_reg = out.AsRegister<Register>();
      Register first_reg = first.AsRegister<Register>();
      // Arm doesn't mask the shift count so we need to do it ourselves.
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        // NOTE(review): the mask is applied in place, clobbering the count
        // input register — confirm the allocator permits that here.
        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
        if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, second_reg);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, second_reg);
        } else {
          __ Lsr(out_reg, first_reg, second_reg);
        }
      } else {
        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
          __ Mov(out_reg, first_reg);
        } else if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, shift_value);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, shift_value);
        } else {
          __ Lsr(out_reg, first_reg, shift_value);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      // TODO: Inline the assembly instead of calling the runtime.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>());

      // Select the runtime entry point matching the shift kind.
      int32_t entry_point_offset;
      if (op->IsShl()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
      } else if (op->IsShr()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
      } else {
        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
      }
      // Load the entry point address from the thread register and call it.
      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
      __ blx(LR);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
  }
}

void LocationsBuilderARM::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void InstructionCodeGeneratorARM::VisitShl(HShl* shl) {
  HandleShift(shl);
}

void LocationsBuilderARM::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void InstructionCodeGeneratorARM::VisitShr(HShr* shr) {
  HandleShift(shr);
}

void LocationsBuilderARM::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) {
  HandleShift(ushr);
}

// NewInstance always calls into the runtime (allocation with access checks).
void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
  locations->SetOut(Location::RegisterLocation(R0));
}

void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Runtime arguments: type index in argument register 0, current method in
  // argument register 1.
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc());
}

void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
  locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  locations->SetOut(Location::RegisterLocation(R0));
  // The array length is pinned to argument register 1.
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
}

void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) {
  InvokeRuntimeCallingConvention calling_convention;
  // Runtime arguments: type index in register 0, length already in register 1,
  // current method in register 2.
  codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(2));
  __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex());
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc());
}

void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  Location location = parameter_visitor_.GetNextLocation(instruction->GetType());
  if (location.IsStackSlot()) {
    // Stack-passed parameters live in the caller's frame: rebase the slot past
    // this method's own frame.
    location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  } else if (location.IsDoubleStackSlot()) {
    location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize());
  }
  locations->SetOut(location);
}

void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) {
  // Nothing to do, the parameter is already at its location.
  UNUSED(instruction);
}

void LocationsBuilderARM::VisitNot(HNot* not_) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

void InstructionCodeGeneratorARM::VisitNot(HNot* not_) {
  LocationSummary* locations = not_->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (not_->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
      // Boolean not: flip the low bit with an exclusive-or.
      __ eor(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(1));
      break;

    case Primitive::kPrimInt:
      __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>()));
      break;

    case Primitive::kPrimLong:
      // Bitwise-not each half of the pair.
      __ mvn(out.AsRegisterPairLow<Register>(),
             ShifterOperand(in.AsRegisterPairLow<Register>()));
      __ mvn(out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    default:
      LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType();
  }
}

void LocationsBuilderARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall);
  switch (compare->InputAt(0)->GetType()) {
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}

// Emits code for HCompare: materializes -1/0/1 in the output register.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}

void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  // Phis never reach code generation; hitting this is a compiler bug.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

// Emits a DMB with the option matching the requested barrier kind.
void InstructionCodeGeneratorARM::GenerateMemoryBarrier(MemBarrierKind kind) {
  // TODO (ported from quick): revisit Arm barrier kinds
  DmbOptions flavour = DmbOptions::ISH;  // quiet c++ warnings
  switch (kind) {
    case MemBarrierKind::kAnyStore:
    case MemBarrierKind::kLoadAny:
    case MemBarrierKind::kAnyAny: {
      flavour = DmbOptions::ISH;
      break;
    }
    case MemBarrierKind::kStoreStore: {
      flavour = DmbOptions::ISHST;
      break;
    }
    default:
      LOG(FATAL) << "Unexpected memory barrier " << kind;
  }
  __ dmb(flavour);
}

// Emits an atomic 64-bit load via ldrexd. `out_lo` doubles as a scratch
// register for the address computation when `offset` is non-zero.
void InstructionCodeGeneratorARM::GenerateWideAtomicLoad(Register addr,
                                                         uint32_t offset,
                                                         Register out_lo,
                                                         Register out_hi) {
  if (offset != 0) {
    __ LoadImmediate(out_lo, offset);
    __ add(IP, addr, ShifterOperand(out_lo));
    addr = IP;
  }
  __ ldrexd(out_lo, out_hi, addr);
}

// Emits an atomic 64-bit store as an ldrexd/strexd retry loop; `temp1` holds
// the strexd status and is tested to decide whether to retry.
void InstructionCodeGeneratorARM::GenerateWideAtomicStore(Register addr,
                                                          uint32_t offset,
                                                          Register value_lo,
                                                          Register value_hi,
                                                          Register temp1,
                                                          Register temp2) {
  Label fail;
  if (offset != 0) {
    __ LoadImmediate(temp1, offset);
    __ add(IP, addr, ShifterOperand(temp1));
    addr = IP;
  }
  __ Bind(&fail);
  // We need a load followed by store. (The address used in a STREX instruction
  // must be the same as the address in the most recently executed LDREX instruction.)
  __ ldrexd(temp1, temp2, addr);
  __ strexd(temp1, value_lo, value_hi, addr);
  __ cmp(temp1, ShifterOperand(0));
  __ b(&fail, NE);
}

// Builds constraints for instance/static field stores, adding the temps needed
// by the GC write barrier or by the wide-volatile store path.
void LocationsBuilderARM::HandleFieldSet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());


  Primitive::Type field_type = field_info.GetFieldType();
  bool is_wide = field_type == Primitive::kPrimLong || field_type == Primitive::kPrimDouble;
  bool generate_volatile = field_info.IsVolatile()
      && is_wide
      && !codegen_->GetInstructionSetFeatures()->HasAtomicLdrdAndStrd();
  // Temporary registers for the write barrier.
  // TODO: consider renaming StoreNeedsWriteBarrier to StoreNeedsGCMark.
  if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  } else if (generate_volatile) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());

    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
    if (field_type == Primitive::kPrimDouble) {
      // For doubles we need two more registers to copy the value.
      locations->AddTemp(Location::RegisterLocation(R2));
      locations->AddTemp(Location::RegisterLocation(R3));
    }
  }
}

// Emits the store for an instance or static field write. Volatile fields get
// a StoreStore barrier before and a StoreLoad barrier after the store; wide
// (64-bit) volatile stores without atomic ldrd/strd fall back to the
// exclusive-store helper. Reference stores emit the GC card mark.
void InstructionCodeGeneratorARM::HandleFieldSet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldSet() || instruction->IsStaticFieldSet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location value = locations->InAt(1);

  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures()->HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  if (is_volatile) {
    // Barrier before the volatile store.
    GenerateMemoryBarrier(MemBarrierKind::kAnyStore);
  }

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      __ StoreToOffset(kStoreByte, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      __ StoreToOffset(kStoreHalfword, value.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value_reg = value.AsRegister<Register>();
      __ StoreToOffset(kStoreWord, value_reg, base, offset);
      if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->InputAt(1))) {
        // Reference store: mark the GC card for the holder object.
        Register temp = locations->GetTemp(0).AsRegister<Register>();
        Register card = locations->GetTemp(1).AsRegister<Register>();
        codegen_->MarkGCCard(temp, card, base, value_reg);
      }
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // strd is not single-copy atomic here; use the exclusive-store helper.
        GenerateWideAtomicStore(base, offset,
                                value.AsRegisterPairLow<Register>(),
                                value.AsRegisterPairHigh<Register>(),
                                locations->GetTemp(0).AsRegister<Register>(),
                                locations->GetTemp(1).AsRegister<Register>());
      } else {
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ StoreSToOffset(value.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value_reg = FromLowSToD(value.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Move the FP value into a core register pair for the exclusive store.
        Register value_reg_lo = locations->GetTemp(0).AsRegister<Register>();
        Register value_reg_hi = locations->GetTemp(1).AsRegister<Register>();

        __ vmovrrd(value_reg_lo, value_reg_hi, value_reg);

        GenerateWideAtomicStore(base, offset,
                                value_reg_lo,
                                value_reg_hi,
                                locations->GetTemp(2).AsRegister<Register>(),
                                locations->GetTemp(3).AsRegister<Register>());
      } else {
        __ StoreDToOffset(value_reg, base, offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (is_volatile) {
    // Barrier after the volatile store.
    GenerateMemoryBarrier(MemBarrierKind::kAnyAny);
  }
}

// Builds locations for a field read. Volatile doubles without atomic
// ldrd/strd need two core temps to receive the ldrexd result.
void LocationsBuilderARM::HandleFieldGet(HInstruction* instruction, const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);

  bool generate_volatile = field_info.IsVolatile()
      && (field_info.GetFieldType() == Primitive::kPrimDouble)
      && !codegen_->GetInstructionSetFeatures()->HasAtomicLdrdAndStrd();
  if (generate_volatile) {
    // Arm encoding have some additional constraints for ldrexd/strexd:
    // - registers need to be consecutive
    // - the first
    // register should be even but not R14.
    // We don't test for Arm yet, and the assertion makes sure that we revisit this if we ever
    // enable Arm encoding.
    DCHECK_EQ(InstructionSet::kThumb2, codegen_->GetInstructionSet());
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}

// Emits the load for an instance or static field read. Volatile wide loads
// without atomic ldrd/strd use the exclusive-load helper; all volatile loads
// are followed by a LoadAny barrier.
void InstructionCodeGeneratorARM::HandleFieldGet(HInstruction* instruction,
                                                 const FieldInfo& field_info) {
  DCHECK(instruction->IsInstanceFieldGet() || instruction->IsStaticFieldGet());

  LocationSummary* locations = instruction->GetLocations();
  Register base = locations->InAt(0).AsRegister<Register>();
  Location out = locations->Out();
  bool is_volatile = field_info.IsVolatile();
  bool atomic_ldrd_strd = codegen_->GetInstructionSetFeatures()->HasAtomicLdrdAndStrd();
  Primitive::Type field_type = field_info.GetFieldType();
  uint32_t offset = field_info.GetFieldOffset().Uint32Value();

  switch (field_type) {
    case Primitive::kPrimBoolean: {
      __ LoadFromOffset(kLoadUnsignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimByte: {
      __ LoadFromOffset(kLoadSignedByte, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimShort: {
      __ LoadFromOffset(kLoadSignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimChar: {
      __ LoadFromOffset(kLoadUnsignedHalfword, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), base, offset);
      break;
    }

    case Primitive::kPrimLong: {
      if (is_volatile && !atomic_ldrd_strd) {
        // ldrd is not single-copy atomic here; use the exclusive-load helper.
        GenerateWideAtomicLoad(base, offset,
                               out.AsRegisterPairLow<Register>(),
                               out.AsRegisterPairHigh<Register>());
      } else {
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), base, offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), base, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out_reg = FromLowSToD(out.AsFpuRegisterPairLow<SRegister>());
      if (is_volatile && !atomic_ldrd_strd) {
        // Load atomically into core temps, then move into the FP register.
        Register lo = locations->GetTemp(0).AsRegister<Register>();
        Register hi = locations->GetTemp(1).AsRegister<Register>();
        GenerateWideAtomicLoad(base, offset, lo, hi);
        __ vmovdrr(out_reg, lo, hi);
      } else {
        __ LoadDFromOffset(out_reg, base, offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }

  if (is_volatile) {
    // Barrier after the volatile load.
    GenerateMemoryBarrier(MemBarrierKind::kLoadAny);
  }
}

// Field visitors simply delegate to the shared field handlers above.
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  HandleFieldSet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  HandleFieldGet(instruction, instruction->GetFieldInfo());
}

void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
HandleFieldSet(instruction, instruction->GetFieldInfo()); 2850} 2851 2852void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) { 2853 HandleFieldSet(instruction, instruction->GetFieldInfo()); 2854} 2855 2856void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) { 2857 LocationSummary* locations = 2858 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 2859 locations->SetInAt(0, Location::RequiresRegister()); 2860 if (instruction->HasUses()) { 2861 locations->SetOut(Location::SameAsFirstInput()); 2862 } 2863} 2864 2865void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) { 2866 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction); 2867 codegen_->AddSlowPath(slow_path); 2868 2869 LocationSummary* locations = instruction->GetLocations(); 2870 Location obj = locations->InAt(0); 2871 2872 if (obj.IsRegister()) { 2873 __ cmp(obj.AsRegister<Register>(), ShifterOperand(0)); 2874 __ b(slow_path->GetEntryLabel(), EQ); 2875 } else { 2876 DCHECK(obj.IsConstant()) << obj; 2877 DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0); 2878 __ b(slow_path->GetEntryLabel()); 2879 } 2880} 2881 2882void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) { 2883 LocationSummary* locations = 2884 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 2885 locations->SetInAt(0, Location::RequiresRegister()); 2886 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1))); 2887 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 2888} 2889 2890void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) { 2891 LocationSummary* locations = instruction->GetLocations(); 2892 Register obj = locations->InAt(0).AsRegister<Register>(); 2893 Location index = locations->InAt(1); 2894 2895 switch (instruction->GetType()) { 2896 case Primitive::kPrimBoolean: { 2897 
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References are loaded as plain 32-bit words (compressed heap refs).
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadSFromOffset(out.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location out = locations->Out();
      DCHECK(out.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadDFromOffset(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}

// Builds locations for an array store. Stores that need a runtime type check
// call pAputObject and therefore use the runtime calling convention.
void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool needs_runtime_call = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, needs_runtime_call ?
          LocationSummary::kCall : LocationSummary::kNoCall);
  if (needs_runtime_call) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    locations->SetInAt(2, Location::RequiresRegister());

    if (needs_write_barrier) {
      // Temporary registers for the write barrier.
      locations->AddTemp(Location::RequiresRegister());
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}

// Emits the element store for an array write. Reference stores that need a
// type check call the pAputObject runtime entry point; other reference
// stores mark the GC card.
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        if (needs_write_barrier) {
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        // Type-checked reference store goes through the runtime.
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc());
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(float)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegister());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ StoreSToOffset(value.AsFpuRegister<SRegister>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimDouble: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(double)).Uint32Value();
      Location value = locations->InAt(2);
      DCHECK(value.IsFpuRegisterPair());
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreDToOffset(FromLowSToD(value.AsFpuRegisterPairLow<SRegister>()), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << value_type;
      UNREACHABLE();
  }
}

void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(),
                    Location::kNoOutputOverlap);
}

// Loads the array length word from the array object.
void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
}

void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

// Branches to the bounds-check slow path when index >= length. The unsigned
// CS condition also catches negative indices.
void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  Register index = locations->InAt(0).AsRegister<Register>();
  Register length = locations->InAt(1).AsRegister<Register>();

  __ cmp(index, ShifterOperand(length));
  __ b(slow_path->GetEntryLabel(), CS);
}

// Marks the GC card for `object` when a non-null reference `value` was
// stored into it. `temp` and `card` are scratch registers.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  // Null stores need no card mark.
  __ CompareAndBranchIfZero(value, &is_null);
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}

void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

// Emits a suspend check unless a back edge or the entry-block goto already
// covers it.
void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

// Tests the thread flags and jumps to the suspend slow path when any flag is
// set. With a successor, control continues there on the fast path.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}

ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}

// Emits the move at `index` of the parallel move: register/stack/FP-register
// sources and destinations, plus int and float constants. IP is used as the
// scratch register for stack-to-stack moves.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else if (destination.IsFpuRegister()) {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else if (source.IsFpuRegister()) {
    if (destination.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    }
  } else if (source.IsDoubleStackSlot()) {
    if (destination.IsFpuRegisterPair()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsDoubleStackSlot()) << destination;
      // 64-bit stack-to-stack move: copy the two words through IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
    }
  } else {
    DCHECK(source.IsConstant()) << source;
    HInstruction* constant = source.GetConstant();
    if (constant->IsIntConstant()) {
      int32_t value = constant->AsIntConstant()->GetValue();
      if (destination.IsRegister()) {
        __ LoadImmediate(destination.AsRegister<Register>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    } else {
      DCHECK(constant->IsFloatConstant());
      float value = constant->AsFloatConstant()->GetValue();
      if (destination.IsFpuRegister()) {
        __ LoadSImmediate(destination.AsFpuRegister<SRegister>(), value);
      } else {
        DCHECK(destination.IsStackSlot());
        // Reinterpret the float bits as an int for the core-register store.
        __ LoadImmediate(IP, bit_cast<int32_t, float>(value));
        __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      }
    }
  }
}

// Swaps a core register with a stack slot, using IP as scratch.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}

// Swaps two stack slots through IP and a scratch core register. If the
// scratch register had to be spilled, the slots moved down one word.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ?
kArmWordSize : 0; 3343 __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()), 3344 SP, mem1 + stack_offset); 3345 __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset); 3346 __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()), 3347 SP, mem2 + stack_offset); 3348 __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset); 3349} 3350 3351void ParallelMoveResolverARM::EmitSwap(size_t index) { 3352 MoveOperands* move = moves_.Get(index); 3353 Location source = move->GetSource(); 3354 Location destination = move->GetDestination(); 3355 3356 if (source.IsRegister() && destination.IsRegister()) { 3357 DCHECK_NE(source.AsRegister<Register>(), IP); 3358 DCHECK_NE(destination.AsRegister<Register>(), IP); 3359 __ Mov(IP, source.AsRegister<Register>()); 3360 __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>()); 3361 __ Mov(destination.AsRegister<Register>(), IP); 3362 } else if (source.IsRegister() && destination.IsStackSlot()) { 3363 Exchange(source.AsRegister<Register>(), destination.GetStackIndex()); 3364 } else if (source.IsStackSlot() && destination.IsRegister()) { 3365 Exchange(destination.AsRegister<Register>(), source.GetStackIndex()); 3366 } else if (source.IsStackSlot() && destination.IsStackSlot()) { 3367 Exchange(source.GetStackIndex(), destination.GetStackIndex()); 3368 } else if (source.IsFpuRegister() && destination.IsFpuRegister()) { 3369 __ vmovrs(IP, source.AsFpuRegister<SRegister>()); 3370 __ vmovs(source.AsFpuRegister<SRegister>(), destination.AsFpuRegister<SRegister>()); 3371 __ vmovsr(destination.AsFpuRegister<SRegister>(), IP); 3372 } else if (source.IsFpuRegister() || destination.IsFpuRegister()) { 3373 SRegister reg = source.IsFpuRegister() ? source.AsFpuRegister<SRegister>() 3374 : destination.AsFpuRegister<SRegister>(); 3375 int mem = source.IsFpuRegister() 3376 ? 
destination.GetStackIndex() 3377 : source.GetStackIndex(); 3378 3379 __ vmovrs(IP, reg); 3380 __ LoadFromOffset(kLoadWord, IP, SP, mem); 3381 __ StoreToOffset(kStoreWord, IP, SP, mem); 3382 } else if (source.IsDoubleStackSlot() && destination.IsDoubleStackSlot()) { 3383 Exchange(source.GetStackIndex(), destination.GetStackIndex()); 3384 Exchange(source.GetHighStackIndex(kArmWordSize), destination.GetHighStackIndex(kArmWordSize)); 3385 } else { 3386 LOG(FATAL) << "Unimplemented" << source << " <-> " << destination; 3387 } 3388} 3389 3390void ParallelMoveResolverARM::SpillScratch(int reg) { 3391 __ Push(static_cast<Register>(reg)); 3392} 3393 3394void ParallelMoveResolverARM::RestoreScratch(int reg) { 3395 __ Pop(static_cast<Register>(reg)); 3396} 3397 3398void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) { 3399 LocationSummary::CallKind call_kind = cls->CanCallRuntime() 3400 ? LocationSummary::kCallOnSlowPath 3401 : LocationSummary::kNoCall; 3402 LocationSummary* locations = 3403 new (GetGraph()->GetArena()) LocationSummary(cls, call_kind); 3404 locations->SetOut(Location::RequiresRegister()); 3405} 3406 3407void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) { 3408 Register out = cls->GetLocations()->Out().AsRegister<Register>(); 3409 if (cls->IsReferrersClass()) { 3410 DCHECK(!cls->CanCallRuntime()); 3411 DCHECK(!cls->MustGenerateClinitCheck()); 3412 codegen_->LoadCurrentMethod(out); 3413 __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value()); 3414 } else { 3415 DCHECK(cls->CanCallRuntime()); 3416 codegen_->LoadCurrentMethod(out); 3417 __ LoadFromOffset( 3418 kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value()); 3419 __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex())); 3420 3421 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM( 3422 cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck()); 3423 
  // NOTE(review): the enclosing visitor's head is above this chunk — presumably
  // HLoadClass handling, with `out` holding the resolved class and `slow_path` a
  // LoadClassSlowPathARM; confirm against the full function.
    codegen_->AddSlowPath(slow_path);
    // A null entry means the class is not yet resolved: take the slow path.
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      // The clinit check binds the slow path's exit label itself.
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

// Registers locations for an explicit class-initialization check.
// kCallOnSlowPath: initialization may call into the runtime.
void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    // The check's value is the class itself, so the output aliases the input.
    locations->SetOut(Location::SameAsFirstInput());
  }
}

// Emits the initialization check for the class produced by the associated
// HLoadClass, running <clinit> through the slow path if needed.
void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<Register>());
}

// Emits: load the class status word, branch to `slow_path` if the class is not
// yet initialized (status < kStatusInitialized), then issue a memory barrier.
// Clobbers IP. Binds the slow path's exit label.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  // Signed less-than: any pre-initialized status takes the slow path.
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}

// kCallOnSlowPath: an unresolved string requires a runtime call.
void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
  locations->SetOut(Location::RequiresRegister());
}

// Loads a String reference from the dex cache:
// current method -> declaring class -> dex-cache strings -> entry at the
// string index. A null entry means unresolved: resolve via the slow path.
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  // `out` is reused as the scratch register for each step of the chain.
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

// Reads the pending exception from the Thread object (via TR) and clears the
// slot so the exception is considered delivered. Clobbers IP.
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  // Clear the exception field now that it has been consumed.
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}

// kCall: throwing always goes through the runtime; the exception object must
// be in the first runtime-call argument register.
void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
}

// Final classes need only an exact class compare (kNoCall); otherwise the
// subtype walk is done on a slow path.
void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
      ? LocationSummary::kNoCall
      : LocationSummary::kCallOnSlowPath;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

// Emits `obj instanceof cls`: 0 for null, 1 on exact class match; non-final
// classes fall back to a TypeCheckSlowPathARM for the full subtype test.
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    // The slow path writes `out` itself and jumps back here.
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}

void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temp holds the object's class for the comparison.
  locations->AddTemp(Location::RequiresRegister());
}

// Emits the check-cast: null objects pass (branch straight to the exit
// label); an exact class match passes; anything else goes to the slow path,
// which performs the full subtype test or throws.
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  // A null reference always passes a checkcast, so skip to the exit.
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}

// kCall: monitor-enter/exit always call the runtime; the object must be in
// the first runtime-call argument register.
void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
      instruction,
      instruction->GetDexPc());
}

// And/Or/Xor share one location strategy.
void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }

// Common location setup for the bitwise operations; int and long only.
void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
         || instruction->GetResultType() == Primitive::kPrimLong);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // For long results the output pair must not alias the inputs, since the low
  // half is written before the high halves of the inputs are read.
  bool output_overlaps = (instruction->GetResultType() == Primitive::kPrimLong);
  locations->SetOut(Location::RequiresRegister(), output_overlaps);
}

void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}

// Emits and/orr/eor: one instruction for int, a low/high instruction pair for
// long (register pairs).
void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    Register first = locations->InAt(0).AsRegister<Register>();
    Register second = locations->InAt(1).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();
    if (instruction->IsAnd()) {
      __ and_(out, first, ShifterOperand(second));
    } else if (instruction->IsOr()) {
      __ orr(out, first, ShifterOperand(second));
    } else {
      DCHECK(instruction->IsXor());
      __ eor(out, first, ShifterOperand(second));
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    Location first = locations->InAt(0);
    Location second = locations->InAt(1);
    Location out = locations->Out();
    if (instruction->IsAnd()) {
      __ and_(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ and_(out.AsRegisterPairHigh<Register>(),
              first.AsRegisterPairHigh<Register>(),
              ShifterOperand(second.AsRegisterPairHigh<Register>()));
    } else if (instruction->IsOr()) {
      __ orr(out.AsRegisterPairLow<Register>(),
             first.AsRegisterPairLow<Register>(),
             ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ orr(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
    } else {
      DCHECK(instruction->IsXor());
      __ eor(out.AsRegisterPairLow<Register>(),
             first.AsRegisterPairLow<Register>(),
             ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ eor(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
    }
  }
}

}  // namespace arm
}  // namespace art