// art/compiler/optimizing/code_generator_arm.cc — revision 4c0b61f506644bb6b647be05d02c5fb45b9ceb48
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/arm/assembler_arm.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/assembler.h"
#include "utils/stack_checks.h"

namespace art {

namespace arm {

// Returns the D (double) register that overlaps the even/odd S register
// pair starting at `reg` (VFP registers alias: D<n> = S<2n>:S<2n+1>).
static DRegister FromLowSToD(SRegister reg) {
  DCHECK_EQ(reg % 2, 0);
  return static_cast<DRegister>(reg / 2);
}

// When true, emit an explicit stack-limit comparison plus a slow path;
// when false (current setting), rely on an implicit probe load in
// GenerateFrameEntry that faults on overflow.
static constexpr bool kExplicitStackOverflowCheck = false;

static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
// Stack offset at which the current ArtMethod* (in R0) is stored on entry.
static constexpr int kCurrentMethodStackOffset = 0;

// Argument registers used when calling quick runtime entrypoints.
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1 };
static constexpr size_t kRuntimeParameterFpuRegistersLength =
    arraysize(kRuntimeParameterFpuRegisters);

// Calling convention used by slow paths when invoking runtime entrypoints.
class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

// In the slow path classes below, `codegen` is the EmitNativeCode parameter.
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->
#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value()

// Base class for ARM slow paths: provides the entry label the fast path
// branches to and the exit label slow paths branch back through.
class SlowPathCodeARM : public SlowPathCode {
 public:
  SlowPathCodeARM() : entry_label_(), exit_label_() {}

  Label* GetEntryLabel() { return &entry_label_; }
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
};

// Slow path that calls the pThrowNullPointer entrypoint. No branch back to
// the exit label is emitted (the throw does not fall through).
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};

// Slow path that calls the pThrowDivZero entrypoint for integer division
// by zero.
class DivZeroCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc());
  }

 private:
  HDivZeroCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM);
};

// Slow path for the explicit stack overflow check: transfers control to the
// pThrowStackOverflow entrypoint by loading its address straight into PC.
class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
 public:
  StackOverflowCheckSlowPathARM() {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __
    Bind(GetEntryLabel());
    // Loading into PC jumps to the entrypoint without touching LR; no return
    // is expected from pThrowStackOverflow.
    __ LoadFromOffset(kLoadWord, PC, TR,
        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
};

// Slow path for HSuspendCheck: saves live registers, calls pTestSuspend, and
// resumes either at the explicit successor block or right after the check.
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};

// Slow path that calls pThrowArrayBounds with the failing index and the
// array length as arguments.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        index_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        length_location_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};

// Slow path that resolves (and optionally initializes) a class through the
// runtime, then moves the resulting class pointer to the expected output.
class LoadClassSlowPathARM : public SlowPathCodeARM {
 public:
  LoadClassSlowPathARM(HLoadClass* cls,
                       HInstruction* at,
                       uint32_t dex_pc,
                       bool do_clinit)
      : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) {
    DCHECK(at->IsLoadClass() || at->IsClinitCheck());
  }

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = at_->GetLocations();

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex());
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1));
    // pInitializeStaticStorage additionally runs the class initializer;
    // pInitializeType only resolves the type.
    int32_t entry_point_offset = do_clinit_
        ? QUICK_ENTRY_POINT(pInitializeStaticStorage)
        : QUICK_ENTRY_POINT(pInitializeType);
    arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_);

    // Move the class to the desired location.
    Location out = locations->Out();
    if (out.IsValid()) {
      DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg()));
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    }
    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  // The class this slow path will load.
  HLoadClass* const cls_;

  // The instruction where this slow path is happening.
  // (Might be the load class or an initialization check).
  HInstruction* const at_;

  // The dex PC of `at_`.
  const uint32_t dex_pc_;

  // Whether to initialize the class.
  const bool do_clinit_;

  DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM);
};

// Slow path that resolves a String through the runtime and moves the result
// (returned in R0) to the instruction's output location.
class LoadStringSlowPathARM : public SlowPathCodeARM {
 public:
  explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0));
    __ LoadImmediate(calling_convention.GetRegisterAt(1), instruction_->GetStringIndex());
    arm_codegen->InvokeRuntime(
        QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc());
    arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HLoadString* const instruction_;

  DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM);
};

// Slow path shared by HInstanceOf and HCheckCast: calls the runtime for the
// non-trivial part of the type check.
class TypeCheckSlowPathARM : public SlowPathCodeARM {
 public:
  TypeCheckSlowPathARM(HInstruction* instruction,
                       Location class_to_check,
                       Location object_class,
                       uint32_t dex_pc)
      : instruction_(instruction),
        class_to_check_(class_to_check),
        object_class_(object_class),
        dex_pc_(dex_pc) {}

  void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    LocationSummary* locations = instruction_->GetLocations();
    DCHECK(instruction_->IsCheckCast()
           || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg()));

    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(locations);

    // We're moving two locations to locations that could overlap, so we need a parallel
    // move resolver.
    InvokeRuntimeCallingConvention calling_convention;
    codegen->EmitParallelMoves(
        class_to_check_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(0)),
        object_class_,
        Location::RegisterLocation(calling_convention.GetRegisterAt(1)));

    if (instruction_->IsInstanceOf()) {
      // pInstanceofNonTrivial leaves its boolean result in R0.
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_);
      arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0));
    } else {
      DCHECK(instruction_->IsCheckCast());
      arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_);
    }

    codegen->RestoreLiveRegisters(locations);
    __ b(GetExitLabel());
  }

 private:
  HInstruction* const instruction_;
  const Location class_to_check_;
  const Location object_class_;
  uint32_t dex_pc_;

  DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM);
};

#undef __

// Redefine `__` for the code generator methods below, where GetAssembler()
// is a member of the enclosing class (note: the #undef above is duplicated
// in the original source; kept as-is).
#undef __
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->

// Maps an HIR comparison to the ARM condition code used to branch on it.
inline Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return EQ;
    case kCondNE: return NE;
    case kCondLT: return LT;
    case kCondLE: return LE;
    case kCondGT: return GT;
    case kCondGE: return GE;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;  // Unreachable.
}

// Maps an HIR comparison to the ARM condition code of its logical negation.
inline Condition ARMOppositeCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return NE;
    case kCondNE: return EQ;
    case kCondLT: return GE;
    case kCondLE: return GT;
    case kCondGT: return LE;
    case kCondGE: return LT;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;  // Unreachable.
}

void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
}

void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromSRegister(SRegister(reg));
}

// Spills core register `reg_id` to the slot at `stack_index`; returns the
// number of bytes used.
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}

// Reloads core register `reg_id` from the slot at `stack_index`; returns the
// number of bytes used.
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}

CodeGeneratorARM::CodeGeneratorARM(HGraph* graph)
    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true) {}

size_t CodeGeneratorARM::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
}

// Picks a free register (or register pair) suitable for `type` and marks it
// — and any pair containing it — as blocked.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimDouble: {
      // A double needs an aligned (even/odd) S register pair, which aliases
      // a D register.
      int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters);
      DCHECK_EQ(reg % 2, 0);
      return Location::FpuRegisterPairLocation(reg, reg + 1);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}

// Marks the registers the register allocator must never hand out.
void CodeGeneratorARM::SetupBlockedRegisters() const {
  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs_[R1_R2] = true;

  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  // We always save and restore R6 and R7 to make sure we can use three
  // register pairs for long operations.
  blocked_core_registers_[R4] = true;
  blocked_core_registers_[R5] = true;
  blocked_core_registers_[R8] = true;
  blocked_core_registers_[R10] = true;
  blocked_core_registers_[R11] = true;

  // The callee-saved FP registers S16-S31 are not used either.
  blocked_fpu_registers_[S16] = true;
  blocked_fpu_registers_[S17] = true;
  blocked_fpu_registers_[S18] = true;
  blocked_fpu_registers_[S19] = true;
  blocked_fpu_registers_[S20] = true;
  blocked_fpu_registers_[S21] = true;
  blocked_fpu_registers_[S22] = true;
  blocked_fpu_registers_[S23] = true;
  blocked_fpu_registers_[S24] = true;
  blocked_fpu_registers_[S25] = true;
  blocked_fpu_registers_[S26] = true;
  blocked_fpu_registers_[S27] = true;
  blocked_fpu_registers_[S28] = true;
  blocked_fpu_registers_[S29] = true;
  blocked_fpu_registers_[S30] = true;
  blocked_fpu_registers_[S31] = true;

  UpdateBlockedPairRegisters();
}

// Re-derives the blocked state of each register pair from the blocked state
// of its two component core registers.
void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
    ArmManagedRegister current =
        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
    if (blocked_core_registers_[current.AsRegisterPairLow()]
        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
      blocked_register_pairs_[i] = true;
    }
  }
}

InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

// Emits the method prologue: a stack overflow check (explicit or implicit
// probe), the callee-save pushes, frame allocation, and the store of the
// current ArtMethod* (in R0) at SP[0].
void CodeGeneratorARM::GenerateFrameEntry() {
  bool skip_overflow_check =
      IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  if (!skip_overflow_check) {
    if (kExplicitStackOverflowCheck) {
      SlowPathCodeARM* slow_path =
          new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
      AddSlowPath(slow_path);

      // Branch to the slow path when SP is below the thread's stack end.
      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
      __ cmp(SP, ShifterOperand(IP));
      __ b(slow_path->GetEntryLabel(), CC);
    } else {
      // Implicit check: probe-load below the reserved stack region; the
      // resulting fault is presumably turned into a StackOverflowError by a
      // signal handler elsewhere in the runtime — not visible here.
      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
      __ LoadFromOffset(kLoadWord, IP, IP, 0);
      RecordPcInfo(nullptr, 0);
    }
  }

  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
  __ PushList(1 << LR | 1 << R6 | 1 << R7);

  // The return PC has already been pushed on the stack.
  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}

// Emits the method epilogue: deallocates the frame and pops the saved LR
// value directly into PC to return.
void CodeGeneratorARM::GenerateFrameExit() {
  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
  __ PopList(1 << PC | 1 << R6 | 1 << R7);
}

void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

// Returns the stack location backing a local variable, sized by its type.
Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}

// Assigns the next argument location (register, register pair, quick
// parameter, or stack slot) for a parameter of `type` under the managed
// calling convention, advancing the visitor's indices.
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      uint32_t stack_index = stack_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimLong: {
      uint32_t index = gp_index_;
      uint32_t stack_index = stack_index_;
      gp_index_ += 2;
      stack_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index));
        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // The long straddles the last argument register and the first stack
        // slot: represented as a "quick parameter".
        return Location::QuickParameter(index, stack_index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimFloat: {
      uint32_t stack_index = stack_index_++;
      // NOTE(review): when no odd S register is pending, advance the float
      // cursor past registers consumed by doubles — presumably mirrors AAPCS
      // VFP back-filling; confirm against the calling convention spec.
      if (float_index_ % 2 == 0) {
        float_index_ = std::max(double_index_, float_index_);
      }
      if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) {
        return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimDouble: {
      // Doubles take an even-aligned pair of S registers.
      double_index_ = std::max(double_index_, RoundUp(float_index_, 2));
      uint32_t stack_index = stack_index_;
      stack_index_ += 2;
      if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) {
        uint32_t index = double_index_;
        double_index_ += 2;
        return Location::FpuRegisterPairLocation(
            calling_convention.GetFpuRegisterAt(index),
            calling_convention.GetFpuRegisterAt(index + 1));
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}

// Returns the location in which a value of `type` is returned from a call
// under the managed calling convention.
Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      return Location::RegisterLocation(R0);
    }

    case Primitive::kPrimFloat: {
      return Location::FpuRegisterLocation(S0);
    }

    case Primitive::kPrimLong: {
      return Location::RegisterPairLocation(R0, R1);
    }

    case Primitive::kPrimDouble: {
      return Location::FpuRegisterPairLocation(S0, S1);
    }

    case Primitive::kPrimVoid:
      return Location();
  }
  UNREACHABLE();
  return Location();
}

// Emits a 32-bit move between any combination of core register, S register,
// and stack slot. IP serves as scratch for stack-to-stack moves.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>());
    } else {
      __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex());
    }
  } else {
    DCHECK(destination.IsStackSlot()) << destination;
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex());
    } else {
      DCHECK(source.IsStackSlot()) << source;
      // Stack-to-stack: stage the word through the IP scratch register.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}

// Emits a 64-bit move between register pairs, FPU register pairs, quick
// parameters (half in a register, half on the caller's stack) and double
// stack slots.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      // The two pairs may overlap; resolve the word moves in parallel.
      EmitParallelMoves(
          Location::RegisterLocation(source.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()),
          Location::RegisterLocation(source.AsRegisterPairLow<Register>()),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()));
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else if (source.IsQuickParameter()) {
      // Low half lives in an argument register; the high half lives in the
      // caller's out area, addressed past this method's frame.
      uint16_t register_index = source.GetQuickParameterRegisterIndex();
      uint16_t stack_index = source.GetQuickParameterStackIndex();
      InvokeDexCallingConvention calling_convention;
      EmitParallelMoves(
          Location::RegisterLocation(calling_convention.GetRegisterAt(register_index)),
          Location::RegisterLocation(destination.AsRegisterPairLow<Register>()),
          Location::StackSlot(
              calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize()),
          Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()));
    } else {
      // No conflict possible, so just do the moves.
      DCHECK(source.IsDoubleStackSlot());
      if (destination.AsRegisterPairLow<Register>() == R1) {
        // NOTE(review): R1/R2 gets two word loads instead of a pair load —
        // presumably because LDRD requires an even-numbered first register;
        // confirm against the ARM architecture manual.
        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
      } else {
        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                          SP, source.GetStackIndex());
      }
    }
  } else if (destination.IsFpuRegisterPair()) {
    if (source.IsDoubleStackSlot()) {
      __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()),
                         SP,
                         source.GetStackIndex());
    } else {
      UNIMPLEMENTED(FATAL);
    }
  } else if (destination.IsQuickParameter()) {
    InvokeDexCallingConvention calling_convention;
    uint16_t register_index = destination.GetQuickParameterRegisterIndex();
    uint16_t stack_index = destination.GetQuickParameterStackIndex();
    if (source.IsRegisterPair()) {
      UNIMPLEMENTED(FATAL);
    } else if (source.IsFpuRegister()) {
      UNIMPLEMENTED(FATAL);
    } else {
      DCHECK(source.IsDoubleStackSlot());
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::RegisterLocation(calling_convention.GetRegisterAt(register_index)),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index + 1)));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      // No conflict possible, so just do the moves.
      if (source.AsRegisterPairLow<Register>() == R1) {
        // Same R1/R2 special case as the load path above (STRD has the same
        // even-register constraint).
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsQuickParameter()) {
      InvokeDexCallingConvention calling_convention;
      uint16_t register_index = source.GetQuickParameterRegisterIndex();
      uint16_t stack_index = source.GetQuickParameterStackIndex();
      // Just move the low part. The only time a source is a quick parameter is
      // when moving the parameter to its stack locations. And the (Java) caller
      // of this method has already done that.
      __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(register_index),
                       SP, destination.GetStackIndex());
      DCHECK_EQ(calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize(),
                static_cast<size_t>(destination.GetHighStackIndex(kArmWordSize)));
    } else if (source.IsFpuRegisterPair()) {
      __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()),
                        SP,
                        destination.GetStackIndex());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      EmitParallelMoves(
          Location::StackSlot(source.GetStackIndex()),
          Location::StackSlot(destination.GetStackIndex()),
          Location::StackSlot(source.GetHighStackIndex(kArmWordSize)),
          Location::StackSlot(destination.GetHighStackIndex(kArmWordSize)));
    }
  }
}

// Materializes `instruction`'s value — constant output, local slot,
// temporary, or computed output — into `location` on behalf of `move_for`.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (locations != nullptr && locations->Out().IsConstant()) {
    HConstant* const_to_move = locations->Out().GetConstant();
    if (const_to_move->IsIntConstant()) {
      int32_t value = const_to_move->AsIntConstant()->GetValue();
      if (location.IsRegister()) {
        __ LoadImmediate(location.AsRegister<Register>(), value);
      } else {
        DCHECK(location.IsStackSlot());
        // Stage the immediate through IP before storing to the slot.
        __ LoadImmediate(IP, value);
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      }
    } else if (const_to_move->IsLongConstant()) {
      int64_t value = const_to_move->AsLongConstant()->GetValue();
      if (location.IsRegisterPair()) {
        __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
        __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
      } else {
        DCHECK(location.IsDoubleStackSlot());
        // Store each 32-bit half separately, staged through IP.
        __ LoadImmediate(IP, Low32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
        __ LoadImmediate(IP, High32Bits(value));
        __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
      }
    }
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else if (instruction->IsTemporary()) {
    Location temp_location = GetTemporaryLocation(instruction->AsTemporary());
    if (temp_location.IsStackSlot()) {
      Move32(location, temp_location);
    } else {
      DCHECK(temp_location.IsDoubleStackSlot());
      Move64(location, temp_location);
    }
  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}

// Calls the quick runtime entrypoint at `entry_point_offset` (relative to
// the thread register TR) and records the safepoint PC for `instruction` at
// `dex_pc`.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc);
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}

void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

// Emits an unconditional branch, folding it away when the successor is the
// fall-through block, and folding in a suspend check on loop back edges.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}

// The exit block generates no code, so no locations are needed.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  UNUSED(exit);
  // The exit block should never be reached at runtime; trap in debug builds.
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ bkpt(0);
  }
}

void LocationsBuilderARM::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  // A register input is only needed when the condition was materialized (or
  // is not an HCondition at all); otherwise the compare is fused with the
  // branch below and reads the condition's own inputs.
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      // Always-taken: branch straight to the true successor (unless it is
      // the fall-through block) and emit nothing else.
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      // Always-false: fall through to the false-successor branch emitted at
      // the end of this visitor.
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        // Encode the constant directly in the CMP when possible; otherwise
        // materialize it in the scratch register IP first.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  // Branch to the false successor unless it is the fall-through block.
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}


void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  // An output register is only needed when the boolean result is consumed
  // as a value rather than only branched on.
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  // If the condition only feeds a branch, VisitIf emits the compare itself.
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value,
                                              &operand)) {
      __ cmp(left, operand);
    } else {
      // Constant does not fit a CMP shifter operand; go through IP.
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  // Materialize the boolean with an IT/ITE block: out = condition ? 1 : 0.
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}

// All concrete comparison nodes share the generic HCondition handling above.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

// Locals live in fixed stack slots; they need no register locations.
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  // Locals only appear in the entry block, where their stack slots are defined.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}

void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  // Input 1 is the stored value; constraining it to the local's stack slot
  // makes the register allocator perform the store via a parallel move.
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}

void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to emit: the location constraint above already placed the value.
  UNUSED(store);
}

// Constants are not materialized eagerly; each use reads the value from the
// ConstantLocation.
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

// Long/float/double constants follow the same pattern as int constants:
// no code is emitted here, the value is picked up at the use site.
void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  // Pin the returned value to the calling convention's return location so
  // no move is needed when generating the frame exit.
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}

// Loads the current ArtMethod* from its slot at the bottom of the frame (SP).
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}

void InstructionCodeGeneratorARM::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.
  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ LoadFromOffset(
      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
  // temp = temp[index_in_cache]
  __ LoadFromOffset(
      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache()));
  // LR = temp[offset_of_quick_compiled_code]
  __ LoadFromOffset(kLoadWord, LR, temp,
                    mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                        kArmWordSize).Int32Value());
  // LR()
  __ blx(LR);

  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

// Shared location setup for all invoke flavors: arguments and the return
// value follow the Java calling convention, and R0 is reserved as a temp
// for the callee ArtMethod* / class pointer.
void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
  locations->AddTemp(Location::RegisterLocation(R0));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
}

void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}

void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  // Offset of the vtable entry in the receiver's class object.
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if
  (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}

void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  // Offset of the IMT entry in the receiver's class object; the IMT has a
  // fixed size, hence the modulo.
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
          (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  // The hidden argument (R12) carries the interface method's dex index so
  // the conflict trampoline can resolve the right target.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      // The long negation reads its input high word after writing part of
      // the output, so the output must not alias the input registers.
      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
DCHECK(in.IsRegister()); 1323 __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0)); 1324 break; 1325 1326 case Primitive::kPrimLong: 1327 DCHECK(in.IsRegisterPair()); 1328 // out.lo = 0 - in.lo (and update the carry/borrow (C) flag) 1329 __ rsbs(out.AsRegisterPairLow<Register>(), 1330 in.AsRegisterPairLow<Register>(), 1331 ShifterOperand(0)); 1332 // We cannot emit an RSC (Reverse Subtract with Carry) 1333 // instruction here, as it does not exist in the Thumb-2 1334 // instruction set. We use the following approach 1335 // using SBC and SUB instead. 1336 // 1337 // out.hi = -C 1338 __ sbc(out.AsRegisterPairHigh<Register>(), 1339 out.AsRegisterPairHigh<Register>(), 1340 ShifterOperand(out.AsRegisterPairHigh<Register>())); 1341 // out.hi = out.hi - in.hi 1342 __ sub(out.AsRegisterPairHigh<Register>(), 1343 out.AsRegisterPairHigh<Register>(), 1344 ShifterOperand(in.AsRegisterPairHigh<Register>())); 1345 break; 1346 1347 case Primitive::kPrimFloat: 1348 DCHECK(in.IsFpuRegister()); 1349 __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>()); 1350 break; 1351 1352 case Primitive::kPrimDouble: 1353 DCHECK(in.IsFpuRegisterPair()); 1354 __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), 1355 FromLowSToD(in.AsFpuRegisterPairLow<SRegister>())); 1356 break; 1357 1358 default: 1359 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType(); 1360 } 1361} 1362 1363void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) { 1364 Primitive::Type result_type = conversion->GetResultType(); 1365 Primitive::Type input_type = conversion->GetInputType(); 1366 DCHECK_NE(result_type, input_type); 1367 1368 // The float-to-long and double-to-long type conversions rely on a 1369 // call to the runtime. 1370 LocationSummary::CallKind call_kind = 1371 ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble) 1372 && result_type == Primitive::kPrimLong) 1373 ? 
LocationSummary::kCall 1374 : LocationSummary::kNoCall; 1375 LocationSummary* locations = 1376 new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind); 1377 1378 switch (result_type) { 1379 case Primitive::kPrimByte: 1380 switch (input_type) { 1381 case Primitive::kPrimShort: 1382 case Primitive::kPrimInt: 1383 case Primitive::kPrimChar: 1384 // Processing a Dex `int-to-byte' instruction. 1385 locations->SetInAt(0, Location::RequiresRegister()); 1386 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1387 break; 1388 1389 default: 1390 LOG(FATAL) << "Unexpected type conversion from " << input_type 1391 << " to " << result_type; 1392 } 1393 break; 1394 1395 case Primitive::kPrimShort: 1396 switch (input_type) { 1397 case Primitive::kPrimByte: 1398 case Primitive::kPrimInt: 1399 case Primitive::kPrimChar: 1400 // Processing a Dex `int-to-short' instruction. 1401 locations->SetInAt(0, Location::RequiresRegister()); 1402 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1403 break; 1404 1405 default: 1406 LOG(FATAL) << "Unexpected type conversion from " << input_type 1407 << " to " << result_type; 1408 } 1409 break; 1410 1411 case Primitive::kPrimInt: 1412 switch (input_type) { 1413 case Primitive::kPrimLong: 1414 // Processing a Dex `long-to-int' instruction. 1415 locations->SetInAt(0, Location::Any()); 1416 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1417 break; 1418 1419 case Primitive::kPrimFloat: 1420 // Processing a Dex `float-to-int' instruction. 1421 locations->SetInAt(0, Location::RequiresFpuRegister()); 1422 locations->SetOut(Location::RequiresRegister()); 1423 locations->AddTemp(Location::RequiresFpuRegister()); 1424 break; 1425 1426 case Primitive::kPrimDouble: 1427 // Processing a Dex `double-to-int' instruction. 
1428 locations->SetInAt(0, Location::RequiresFpuRegister()); 1429 locations->SetOut(Location::RequiresRegister()); 1430 locations->AddTemp(Location::RequiresFpuRegister()); 1431 break; 1432 1433 default: 1434 LOG(FATAL) << "Unexpected type conversion from " << input_type 1435 << " to " << result_type; 1436 } 1437 break; 1438 1439 case Primitive::kPrimLong: 1440 switch (input_type) { 1441 case Primitive::kPrimByte: 1442 case Primitive::kPrimShort: 1443 case Primitive::kPrimInt: 1444 case Primitive::kPrimChar: 1445 // Processing a Dex `int-to-long' instruction. 1446 locations->SetInAt(0, Location::RequiresRegister()); 1447 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1448 break; 1449 1450 case Primitive::kPrimFloat: { 1451 // Processing a Dex `float-to-long' instruction. 1452 InvokeRuntimeCallingConvention calling_convention; 1453 locations->SetInAt(0, Location::FpuRegisterLocation( 1454 calling_convention.GetFpuRegisterAt(0))); 1455 locations->SetOut(Location::RegisterPairLocation(R0, R1)); 1456 break; 1457 } 1458 1459 case Primitive::kPrimDouble: { 1460 // Processing a Dex `double-to-long' instruction. 1461 InvokeRuntimeCallingConvention calling_convention; 1462 locations->SetInAt(0, Location::FpuRegisterPairLocation( 1463 calling_convention.GetFpuRegisterAt(0), 1464 calling_convention.GetFpuRegisterAt(1))); 1465 locations->SetOut(Location::RegisterPairLocation(R0, R1)); 1466 break; 1467 } 1468 1469 default: 1470 LOG(FATAL) << "Unexpected type conversion from " << input_type 1471 << " to " << result_type; 1472 } 1473 break; 1474 1475 case Primitive::kPrimChar: 1476 switch (input_type) { 1477 case Primitive::kPrimByte: 1478 case Primitive::kPrimShort: 1479 case Primitive::kPrimInt: 1480 // Processing a Dex `int-to-char' instruction. 
1481 locations->SetInAt(0, Location::RequiresRegister()); 1482 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1483 break; 1484 1485 default: 1486 LOG(FATAL) << "Unexpected type conversion from " << input_type 1487 << " to " << result_type; 1488 } 1489 break; 1490 1491 case Primitive::kPrimFloat: 1492 switch (input_type) { 1493 case Primitive::kPrimByte: 1494 case Primitive::kPrimShort: 1495 case Primitive::kPrimInt: 1496 case Primitive::kPrimChar: 1497 // Processing a Dex `int-to-float' instruction. 1498 locations->SetInAt(0, Location::RequiresRegister()); 1499 locations->SetOut(Location::RequiresFpuRegister()); 1500 break; 1501 1502 case Primitive::kPrimLong: 1503 // Processing a Dex `long-to-float' instruction. 1504 locations->SetInAt(0, Location::RequiresRegister()); 1505 locations->SetOut(Location::RequiresFpuRegister()); 1506 locations->AddTemp(Location::RequiresRegister()); 1507 locations->AddTemp(Location::RequiresRegister()); 1508 locations->AddTemp(Location::RequiresFpuRegister()); 1509 locations->AddTemp(Location::RequiresFpuRegister()); 1510 break; 1511 1512 case Primitive::kPrimDouble: 1513 // Processing a Dex `double-to-float' instruction. 1514 locations->SetInAt(0, Location::RequiresFpuRegister()); 1515 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); 1516 break; 1517 1518 default: 1519 LOG(FATAL) << "Unexpected type conversion from " << input_type 1520 << " to " << result_type; 1521 }; 1522 break; 1523 1524 case Primitive::kPrimDouble: 1525 switch (input_type) { 1526 case Primitive::kPrimByte: 1527 case Primitive::kPrimShort: 1528 case Primitive::kPrimInt: 1529 case Primitive::kPrimChar: 1530 // Processing a Dex `int-to-double' instruction. 1531 locations->SetInAt(0, Location::RequiresRegister()); 1532 locations->SetOut(Location::RequiresFpuRegister()); 1533 break; 1534 1535 case Primitive::kPrimLong: 1536 // Processing a Dex `long-to-double' instruction. 
1537 locations->SetInAt(0, Location::RequiresRegister()); 1538 locations->SetOut(Location::RequiresFpuRegister()); 1539 locations->AddTemp(Location::RequiresRegister()); 1540 locations->AddTemp(Location::RequiresRegister()); 1541 locations->AddTemp(Location::RequiresFpuRegister()); 1542 break; 1543 1544 case Primitive::kPrimFloat: 1545 // Processing a Dex `float-to-double' instruction. 1546 locations->SetInAt(0, Location::RequiresFpuRegister()); 1547 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); 1548 break; 1549 1550 default: 1551 LOG(FATAL) << "Unexpected type conversion from " << input_type 1552 << " to " << result_type; 1553 }; 1554 break; 1555 1556 default: 1557 LOG(FATAL) << "Unexpected type conversion from " << input_type 1558 << " to " << result_type; 1559 } 1560} 1561 1562void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) { 1563 LocationSummary* locations = conversion->GetLocations(); 1564 Location out = locations->Out(); 1565 Location in = locations->InAt(0); 1566 Primitive::Type result_type = conversion->GetResultType(); 1567 Primitive::Type input_type = conversion->GetInputType(); 1568 DCHECK_NE(result_type, input_type); 1569 switch (result_type) { 1570 case Primitive::kPrimByte: 1571 switch (input_type) { 1572 case Primitive::kPrimShort: 1573 case Primitive::kPrimInt: 1574 case Primitive::kPrimChar: 1575 // Processing a Dex `int-to-byte' instruction. 1576 __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8); 1577 break; 1578 1579 default: 1580 LOG(FATAL) << "Unexpected type conversion from " << input_type 1581 << " to " << result_type; 1582 } 1583 break; 1584 1585 case Primitive::kPrimShort: 1586 switch (input_type) { 1587 case Primitive::kPrimByte: 1588 case Primitive::kPrimInt: 1589 case Primitive::kPrimChar: 1590 // Processing a Dex `int-to-short' instruction. 
1591 __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16); 1592 break; 1593 1594 default: 1595 LOG(FATAL) << "Unexpected type conversion from " << input_type 1596 << " to " << result_type; 1597 } 1598 break; 1599 1600 case Primitive::kPrimInt: 1601 switch (input_type) { 1602 case Primitive::kPrimLong: 1603 // Processing a Dex `long-to-int' instruction. 1604 DCHECK(out.IsRegister()); 1605 if (in.IsRegisterPair()) { 1606 __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>()); 1607 } else if (in.IsDoubleStackSlot()) { 1608 __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex()); 1609 } else { 1610 DCHECK(in.IsConstant()); 1611 DCHECK(in.GetConstant()->IsLongConstant()); 1612 int64_t value = in.GetConstant()->AsLongConstant()->GetValue(); 1613 __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value)); 1614 } 1615 break; 1616 1617 case Primitive::kPrimFloat: { 1618 // Processing a Dex `float-to-int' instruction. 1619 SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>(); 1620 __ vmovs(temp, in.AsFpuRegister<SRegister>()); 1621 __ vcvtis(temp, temp); 1622 __ vmovrs(out.AsRegister<Register>(), temp); 1623 break; 1624 } 1625 1626 case Primitive::kPrimDouble: { 1627 // Processing a Dex `double-to-int' instruction. 1628 SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>(); 1629 DRegister temp_d = FromLowSToD(temp_s); 1630 __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>())); 1631 __ vcvtid(temp_s, temp_d); 1632 __ vmovrs(out.AsRegister<Register>(), temp_s); 1633 break; 1634 } 1635 1636 default: 1637 LOG(FATAL) << "Unexpected type conversion from " << input_type 1638 << " to " << result_type; 1639 } 1640 break; 1641 1642 case Primitive::kPrimLong: 1643 switch (input_type) { 1644 case Primitive::kPrimByte: 1645 case Primitive::kPrimShort: 1646 case Primitive::kPrimInt: 1647 case Primitive::kPrimChar: 1648 // Processing a Dex `int-to-long' instruction. 
1649 DCHECK(out.IsRegisterPair()); 1650 DCHECK(in.IsRegister()); 1651 __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>()); 1652 // Sign extension. 1653 __ Asr(out.AsRegisterPairHigh<Register>(), 1654 out.AsRegisterPairLow<Register>(), 1655 31); 1656 break; 1657 1658 case Primitive::kPrimFloat: 1659 // Processing a Dex `float-to-long' instruction. 1660 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l), 1661 conversion, 1662 conversion->GetDexPc()); 1663 break; 1664 1665 case Primitive::kPrimDouble: 1666 // Processing a Dex `double-to-long' instruction. 1667 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l), 1668 conversion, 1669 conversion->GetDexPc()); 1670 break; 1671 1672 default: 1673 LOG(FATAL) << "Unexpected type conversion from " << input_type 1674 << " to " << result_type; 1675 } 1676 break; 1677 1678 case Primitive::kPrimChar: 1679 switch (input_type) { 1680 case Primitive::kPrimByte: 1681 case Primitive::kPrimShort: 1682 case Primitive::kPrimInt: 1683 // Processing a Dex `int-to-char' instruction. 1684 __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16); 1685 break; 1686 1687 default: 1688 LOG(FATAL) << "Unexpected type conversion from " << input_type 1689 << " to " << result_type; 1690 } 1691 break; 1692 1693 case Primitive::kPrimFloat: 1694 switch (input_type) { 1695 case Primitive::kPrimByte: 1696 case Primitive::kPrimShort: 1697 case Primitive::kPrimInt: 1698 case Primitive::kPrimChar: { 1699 // Processing a Dex `int-to-float' instruction. 1700 __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>()); 1701 __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>()); 1702 break; 1703 } 1704 1705 case Primitive::kPrimLong: { 1706 // Processing a Dex `long-to-float' instruction. 
1707 Register low = in.AsRegisterPairLow<Register>(); 1708 Register high = in.AsRegisterPairHigh<Register>(); 1709 SRegister output = out.AsFpuRegister<SRegister>(); 1710 Register constant_low = locations->GetTemp(0).AsRegister<Register>(); 1711 Register constant_high = locations->GetTemp(1).AsRegister<Register>(); 1712 SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>(); 1713 DRegister temp1_d = FromLowSToD(temp1_s); 1714 SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>(); 1715 DRegister temp2_d = FromLowSToD(temp2_s); 1716 1717 // Operations use doubles for precision reasons (each 32-bit 1718 // half of a long fits in the 53-bit mantissa of a double, 1719 // but not in the 24-bit mantissa of a float). This is 1720 // especially important for the low bits. The result is 1721 // eventually converted to float. 1722 1723 // temp1_d = int-to-double(high) 1724 __ vmovsr(temp1_s, high); 1725 __ vcvtdi(temp1_d, temp1_s); 1726 // Using vmovd to load the `k2Pow32EncodingForDouble` constant 1727 // as an immediate value into `temp2_d` does not work, as 1728 // this instruction only transfers 8 significant bits of its 1729 // immediate operand. Instead, use two 32-bit core 1730 // registers to load `k2Pow32EncodingForDouble` into 1731 // `temp2_d`. 1732 __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble)); 1733 __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble)); 1734 __ vmovdrr(temp2_d, constant_low, constant_high); 1735 // temp1_d = temp1_d * 2^32 1736 __ vmuld(temp1_d, temp1_d, temp2_d); 1737 // temp2_d = unsigned-to-double(low) 1738 __ vmovsr(temp2_s, low); 1739 __ vcvtdu(temp2_d, temp2_s); 1740 // temp1_d = temp1_d + temp2_d 1741 __ vaddd(temp1_d, temp1_d, temp2_d); 1742 // output = double-to-float(temp1_d); 1743 __ vcvtsd(output, temp1_d); 1744 break; 1745 } 1746 1747 case Primitive::kPrimDouble: 1748 // Processing a Dex `double-to-float' instruction. 
1749 __ vcvtsd(out.AsFpuRegister<SRegister>(), 1750 FromLowSToD(in.AsFpuRegisterPairLow<SRegister>())); 1751 break; 1752 1753 default: 1754 LOG(FATAL) << "Unexpected type conversion from " << input_type 1755 << " to " << result_type; 1756 }; 1757 break; 1758 1759 case Primitive::kPrimDouble: 1760 switch (input_type) { 1761 case Primitive::kPrimByte: 1762 case Primitive::kPrimShort: 1763 case Primitive::kPrimInt: 1764 case Primitive::kPrimChar: { 1765 // Processing a Dex `int-to-double' instruction. 1766 __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>()); 1767 __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), 1768 out.AsFpuRegisterPairLow<SRegister>()); 1769 break; 1770 } 1771 1772 case Primitive::kPrimLong: { 1773 // Processing a Dex `long-to-double' instruction. 1774 Register low = in.AsRegisterPairLow<Register>(); 1775 Register high = in.AsRegisterPairHigh<Register>(); 1776 SRegister out_s = out.AsFpuRegisterPairLow<SRegister>(); 1777 DRegister out_d = FromLowSToD(out_s); 1778 Register constant_low = locations->GetTemp(0).AsRegister<Register>(); 1779 Register constant_high = locations->GetTemp(1).AsRegister<Register>(); 1780 SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>(); 1781 DRegister temp_d = FromLowSToD(temp_s); 1782 1783 // out_d = int-to-double(high) 1784 __ vmovsr(out_s, high); 1785 __ vcvtdi(out_d, out_s); 1786 // Using vmovd to load the `k2Pow32EncodingForDouble` constant 1787 // as an immediate value into `temp_d` does not work, as 1788 // this instruction only transfers 8 significant bits of its 1789 // immediate operand. Instead, use two 32-bit core 1790 // registers to load `k2Pow32EncodingForDouble` into `temp_d`. 
1791 __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble)); 1792 __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble)); 1793 __ vmovdrr(temp_d, constant_low, constant_high); 1794 // out_d = out_d * 2^32 1795 __ vmuld(out_d, out_d, temp_d); 1796 // temp_d = unsigned-to-double(low) 1797 __ vmovsr(temp_s, low); 1798 __ vcvtdu(temp_d, temp_s); 1799 // out_d = out_d + temp_d 1800 __ vaddd(out_d, out_d, temp_d); 1801 break; 1802 } 1803 1804 case Primitive::kPrimFloat: 1805 // Processing a Dex `float-to-double' instruction. 1806 __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), 1807 in.AsFpuRegister<SRegister>()); 1808 break; 1809 1810 default: 1811 LOG(FATAL) << "Unexpected type conversion from " << input_type 1812 << " to " << result_type; 1813 }; 1814 break; 1815 1816 default: 1817 LOG(FATAL) << "Unexpected type conversion from " << input_type 1818 << " to " << result_type; 1819 } 1820} 1821 1822void LocationsBuilderARM::VisitAdd(HAdd* add) { 1823 LocationSummary* locations = 1824 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall); 1825 switch (add->GetResultType()) { 1826 case Primitive::kPrimInt: 1827 case Primitive::kPrimLong: { 1828 bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong); 1829 locations->SetInAt(0, Location::RequiresRegister()); 1830 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1))); 1831 locations->SetOut(Location::RequiresRegister(), output_overlaps); 1832 break; 1833 } 1834 1835 case Primitive::kPrimFloat: 1836 case Primitive::kPrimDouble: { 1837 locations->SetInAt(0, Location::RequiresFpuRegister()); 1838 locations->SetInAt(1, Location::RequiresFpuRegister()); 1839 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); 1840 break; 1841 } 1842 1843 default: 1844 LOG(FATAL) << "Unexpected add type " << add->GetResultType(); 1845 } 1846} 1847 1848void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) { 1849 LocationSummary* 
locations = add->GetLocations(); 1850 Location out = locations->Out(); 1851 Location first = locations->InAt(0); 1852 Location second = locations->InAt(1); 1853 switch (add->GetResultType()) { 1854 case Primitive::kPrimInt: 1855 if (second.IsRegister()) { 1856 __ add(out.AsRegister<Register>(), 1857 first.AsRegister<Register>(), 1858 ShifterOperand(second.AsRegister<Register>())); 1859 } else { 1860 __ AddConstant(out.AsRegister<Register>(), 1861 first.AsRegister<Register>(), 1862 second.GetConstant()->AsIntConstant()->GetValue()); 1863 } 1864 break; 1865 1866 case Primitive::kPrimLong: 1867 __ adds(out.AsRegisterPairLow<Register>(), 1868 first.AsRegisterPairLow<Register>(), 1869 ShifterOperand(second.AsRegisterPairLow<Register>())); 1870 __ adc(out.AsRegisterPairHigh<Register>(), 1871 first.AsRegisterPairHigh<Register>(), 1872 ShifterOperand(second.AsRegisterPairHigh<Register>())); 1873 break; 1874 1875 case Primitive::kPrimFloat: 1876 __ vadds(out.AsFpuRegister<SRegister>(), 1877 first.AsFpuRegister<SRegister>(), 1878 second.AsFpuRegister<SRegister>()); 1879 break; 1880 1881 case Primitive::kPrimDouble: 1882 __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), 1883 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()), 1884 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>())); 1885 break; 1886 1887 default: 1888 LOG(FATAL) << "Unexpected add type " << add->GetResultType(); 1889 } 1890} 1891 1892void LocationsBuilderARM::VisitSub(HSub* sub) { 1893 LocationSummary* locations = 1894 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall); 1895 switch (sub->GetResultType()) { 1896 case Primitive::kPrimInt: 1897 case Primitive::kPrimLong: { 1898 bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong); 1899 locations->SetInAt(0, Location::RequiresRegister()); 1900 locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1))); 1901 locations->SetOut(Location::RequiresRegister(), output_overlaps); 1902 break; 1903 } 1904 
case Primitive::kPrimFloat: 1905 case Primitive::kPrimDouble: { 1906 locations->SetInAt(0, Location::RequiresFpuRegister()); 1907 locations->SetInAt(1, Location::RequiresFpuRegister()); 1908 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); 1909 break; 1910 } 1911 default: 1912 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType(); 1913 } 1914} 1915 1916void InstructionCodeGeneratorARM::VisitSub(HSub* sub) { 1917 LocationSummary* locations = sub->GetLocations(); 1918 Location out = locations->Out(); 1919 Location first = locations->InAt(0); 1920 Location second = locations->InAt(1); 1921 switch (sub->GetResultType()) { 1922 case Primitive::kPrimInt: { 1923 if (second.IsRegister()) { 1924 __ sub(out.AsRegister<Register>(), 1925 first.AsRegister<Register>(), 1926 ShifterOperand(second.AsRegister<Register>())); 1927 } else { 1928 __ AddConstant(out.AsRegister<Register>(), 1929 first.AsRegister<Register>(), 1930 -second.GetConstant()->AsIntConstant()->GetValue()); 1931 } 1932 break; 1933 } 1934 1935 case Primitive::kPrimLong: { 1936 __ subs(out.AsRegisterPairLow<Register>(), 1937 first.AsRegisterPairLow<Register>(), 1938 ShifterOperand(second.AsRegisterPairLow<Register>())); 1939 __ sbc(out.AsRegisterPairHigh<Register>(), 1940 first.AsRegisterPairHigh<Register>(), 1941 ShifterOperand(second.AsRegisterPairHigh<Register>())); 1942 break; 1943 } 1944 1945 case Primitive::kPrimFloat: { 1946 __ vsubs(out.AsFpuRegister<SRegister>(), 1947 first.AsFpuRegister<SRegister>(), 1948 second.AsFpuRegister<SRegister>()); 1949 break; 1950 } 1951 1952 case Primitive::kPrimDouble: { 1953 __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), 1954 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()), 1955 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>())); 1956 break; 1957 } 1958 1959 1960 default: 1961 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType(); 1962 } 1963} 1964 1965void LocationsBuilderARM::VisitMul(HMul* mul) { 1966 
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

// Emits the multiply. The 64-bit case computes the full product with
// mul/mla for the cross terms and umull for the low product; the
// instruction order matters because IP is reused as a scratch register.
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Extra checks to protect caused by the existence of R1_R2.
      // The algorithm is wrong if out.hi is either in1.lo or in2.lo:
      // (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2);
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}

// Register constraints for HDiv: long division goes through a runtime call,
// so it pins inputs/outputs to the runtime calling convention.
void LocationsBuilderARM::VisitDiv(HDiv* div) {
  LocationSummary::CallKind call_kind = div->GetResultType() == Primitive::kPrimLong
      ?
      LocationSummary::kCall
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      break;
    }
    case Primitive::kPrimLong: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
      // The runtime helper puts the output in R0,R2.
      locations->SetOut(Location::RegisterPairLocation(R0, R2));
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

// Emits the division: hardware sdiv for ints, pLdiv runtime call for longs
// (the DCHECKs verify the register allocator honored the calling convention
// set up in the locations builder), vdivs/vdivd for floats/doubles.
void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) {
  LocationSummary* locations = div->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (div->GetResultType()) {
    case Primitive::kPrimInt: {
      __ sdiv(out.AsRegister<Register>(),
              first.AsRegister<Register>(),
              second.AsRegister<Register>());
      break;
    }

    case Primitive::kPrimLong: {
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>());

      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc());
      break;
    }

    case Primitive::kPrimFloat: {
      __ vdivs(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected div type " << div->GetResultType();
  }
}

// Register constraints for HRem: int rem is computed inline with a temp;
// long rem calls the pLmod runtime helper (output pinned to R2,R3).
void LocationsBuilderARM::VisitRem(HRem* rem) {
  LocationSummary::CallKind call_kind = rem->GetResultType() == Primitive::kPrimLong
      ?
      LocationSummary::kCall
      : LocationSummary::kNoCall;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind);

  switch (rem->GetResultType()) {
    case Primitive::kPrimInt: {
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
      // Temp holds the intermediate quotient, then quotient * divisor.
      locations->AddTemp(Location::RequiresRegister());
      break;
    }
    case Primitive::kPrimLong: {
      InvokeRuntimeCallingConvention calling_convention;
      locations->SetInAt(0, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1)));
      locations->SetInAt(1, Location::RegisterPairLocation(
          calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3)));
      // The runtime helper puts the output in R2,R3.
      locations->SetOut(Location::RegisterPairLocation(R2, R3));
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      LOG(FATAL) << "Unimplemented rem type " << rem->GetResultType();
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
  }
}

// Emits the remainder. ARM has no hardware modulo, so the int case uses the
// identity rem = dividend - (dividend / divisor) * divisor.
void InstructionCodeGeneratorARM::VisitRem(HRem* rem) {
  LocationSummary* locations = rem->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  switch (rem->GetResultType()) {
    case Primitive::kPrimInt: {
      Register reg1 = first.AsRegister<Register>();
      Register reg2 = second.AsRegister<Register>();
      Register temp = locations->GetTemp(0).AsRegister<Register>();

      // temp = reg1 / reg2  (integer division)
      // temp = temp * reg2
      // dest = reg1 - temp
      __ sdiv(temp, reg1, reg2);
      __ mul(temp, temp, reg2);
      __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp));
      break;
    }

    case Primitive::kPrimLong: {
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>());
      DCHECK_EQ(R2, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R3, out.AsRegisterPairHigh<Register>());

      codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc());
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      LOG(FATAL) << "Unimplemented rem type " << rem->GetResultType();
      break;
    }

    default:
      LOG(FATAL) << "Unexpected rem type " << rem->GetResultType();
  }
}

// Register constraints for HDivZeroCheck: the divisor may be a register or a
// constant; the (unused) output aliases the input when the check has uses.
void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0)));
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

// Branches to the DivZeroCheck slow path when the divisor is zero. Constant
// divisors are resolved at compile time: zero becomes an unconditional
// branch, non-zero emits nothing.
void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location value = locations->InAt(0);

  switch (instruction->GetType()) {
    case Primitive::kPrimInt: {
      if (value.IsRegister()) {
        __ cmp(value.AsRegister<Register>(), ShifterOperand(0));
        __ b(slow_path->GetEntryLabel(), EQ);
      } else {
        DCHECK(value.IsConstant()) << value;
        if
(value.GetConstant()->AsIntConstant()->GetValue() == 0) { 2238 __ b(slow_path->GetEntryLabel()); 2239 } 2240 } 2241 break; 2242 } 2243 case Primitive::kPrimLong: { 2244 if (value.IsRegisterPair()) { 2245 __ orrs(IP, 2246 value.AsRegisterPairLow<Register>(), 2247 ShifterOperand(value.AsRegisterPairHigh<Register>())); 2248 __ b(slow_path->GetEntryLabel(), EQ); 2249 } else { 2250 DCHECK(value.IsConstant()) << value; 2251 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) { 2252 __ b(slow_path->GetEntryLabel()); 2253 } 2254 } 2255 break; 2256 default: 2257 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType(); 2258 } 2259 } 2260} 2261 2262void LocationsBuilderARM::HandleShift(HBinaryOperation* op) { 2263 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr()); 2264 2265 LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong 2266 ? LocationSummary::kCall 2267 : LocationSummary::kNoCall; 2268 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind); 2269 2270 switch (op->GetResultType()) { 2271 case Primitive::kPrimInt: { 2272 locations->SetInAt(0, Location::RequiresRegister()); 2273 locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1))); 2274 locations->SetOut(Location::RequiresRegister()); 2275 break; 2276 } 2277 case Primitive::kPrimLong: { 2278 InvokeRuntimeCallingConvention calling_convention; 2279 locations->SetInAt(0, Location::RegisterPairLocation( 2280 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1))); 2281 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); 2282 // The runtime helper puts the output in R0,R2. 
2283 locations->SetOut(Location::RegisterPairLocation(R0, R2)); 2284 break; 2285 } 2286 default: 2287 LOG(FATAL) << "Unexpected operation type " << op->GetResultType(); 2288 } 2289} 2290 2291void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) { 2292 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr()); 2293 2294 LocationSummary* locations = op->GetLocations(); 2295 Location out = locations->Out(); 2296 Location first = locations->InAt(0); 2297 Location second = locations->InAt(1); 2298 2299 Primitive::Type type = op->GetResultType(); 2300 switch (type) { 2301 case Primitive::kPrimInt: { 2302 Register out_reg = out.AsRegister<Register>(); 2303 Register first_reg = first.AsRegister<Register>(); 2304 // Arm doesn't mask the shift count so we need to do it ourselves. 2305 if (second.IsRegister()) { 2306 Register second_reg = second.AsRegister<Register>(); 2307 __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue)); 2308 if (op->IsShl()) { 2309 __ Lsl(out_reg, first_reg, second_reg); 2310 } else if (op->IsShr()) { 2311 __ Asr(out_reg, first_reg, second_reg); 2312 } else { 2313 __ Lsr(out_reg, first_reg, second_reg); 2314 } 2315 } else { 2316 int32_t cst = second.GetConstant()->AsIntConstant()->GetValue(); 2317 uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue); 2318 if (shift_value == 0) { // arm does not support shifting with 0 immediate. 2319 __ Mov(out_reg, first_reg); 2320 } else if (op->IsShl()) { 2321 __ Lsl(out_reg, first_reg, shift_value); 2322 } else if (op->IsShr()) { 2323 __ Asr(out_reg, first_reg, shift_value); 2324 } else { 2325 __ Lsr(out_reg, first_reg, shift_value); 2326 } 2327 } 2328 break; 2329 } 2330 case Primitive::kPrimLong: { 2331 // TODO: Inline the assembly instead of calling the runtime. 
2332 InvokeRuntimeCallingConvention calling_convention; 2333 DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>()); 2334 DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>()); 2335 DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>()); 2336 DCHECK_EQ(R0, out.AsRegisterPairLow<Register>()); 2337 DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>()); 2338 2339 int32_t entry_point_offset; 2340 if (op->IsShl()) { 2341 entry_point_offset = QUICK_ENTRY_POINT(pShlLong); 2342 } else if (op->IsShr()) { 2343 entry_point_offset = QUICK_ENTRY_POINT(pShrLong); 2344 } else { 2345 entry_point_offset = QUICK_ENTRY_POINT(pUshrLong); 2346 } 2347 __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset); 2348 __ blx(LR); 2349 break; 2350 } 2351 default: 2352 LOG(FATAL) << "Unexpected operation type " << type; 2353 } 2354} 2355 2356void LocationsBuilderARM::VisitShl(HShl* shl) { 2357 HandleShift(shl); 2358} 2359 2360void InstructionCodeGeneratorARM::VisitShl(HShl* shl) { 2361 HandleShift(shl); 2362} 2363 2364void LocationsBuilderARM::VisitShr(HShr* shr) { 2365 HandleShift(shr); 2366} 2367 2368void InstructionCodeGeneratorARM::VisitShr(HShr* shr) { 2369 HandleShift(shr); 2370} 2371 2372void LocationsBuilderARM::VisitUShr(HUShr* ushr) { 2373 HandleShift(ushr); 2374} 2375 2376void InstructionCodeGeneratorARM::VisitUShr(HUShr* ushr) { 2377 HandleShift(ushr); 2378} 2379 2380void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) { 2381 LocationSummary* locations = 2382 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall); 2383 InvokeRuntimeCallingConvention calling_convention; 2384 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0))); 2385 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 2386 locations->SetOut(Location::RegisterLocation(R0)); 2387} 2388 2389void 
InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) { 2390 InvokeRuntimeCallingConvention calling_convention; 2391 codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1)); 2392 __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex()); 2393 codegen_->InvokeRuntime( 2394 QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc()); 2395} 2396 2397void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) { 2398 LocationSummary* locations = 2399 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall); 2400 InvokeRuntimeCallingConvention calling_convention; 2401 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0))); 2402 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 2403 locations->SetOut(Location::RegisterLocation(R0)); 2404 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); 2405} 2406 2407void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) { 2408 InvokeRuntimeCallingConvention calling_convention; 2409 codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1)); 2410 __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex()); 2411 codegen_->InvokeRuntime( 2412 QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc()); 2413} 2414 2415void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) { 2416 LocationSummary* locations = 2417 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 2418 Location location = parameter_visitor_.GetNextLocation(instruction->GetType()); 2419 if (location.IsStackSlot()) { 2420 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); 2421 } else if (location.IsDoubleStackSlot()) { 2422 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); 
2423 } 2424 locations->SetOut(location); 2425} 2426 2427void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) { 2428 // Nothing to do, the parameter is already at its location. 2429 UNUSED(instruction); 2430} 2431 2432void LocationsBuilderARM::VisitNot(HNot* not_) { 2433 LocationSummary* locations = 2434 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall); 2435 locations->SetInAt(0, Location::RequiresRegister()); 2436 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 2437} 2438 2439void InstructionCodeGeneratorARM::VisitNot(HNot* not_) { 2440 LocationSummary* locations = not_->GetLocations(); 2441 Location out = locations->Out(); 2442 Location in = locations->InAt(0); 2443 switch (not_->InputAt(0)->GetType()) { 2444 case Primitive::kPrimBoolean: 2445 __ eor(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(1)); 2446 break; 2447 2448 case Primitive::kPrimInt: 2449 __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>())); 2450 break; 2451 2452 case Primitive::kPrimLong: 2453 __ mvn(out.AsRegisterPairLow<Register>(), 2454 ShifterOperand(in.AsRegisterPairLow<Register>())); 2455 __ mvn(out.AsRegisterPairHigh<Register>(), 2456 ShifterOperand(in.AsRegisterPairHigh<Register>())); 2457 break; 2458 2459 default: 2460 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType(); 2461 } 2462} 2463 2464void LocationsBuilderARM::VisitCompare(HCompare* compare) { 2465 LocationSummary* locations = 2466 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall); 2467 switch (compare->InputAt(0)->GetType()) { 2468 case Primitive::kPrimLong: { 2469 locations->SetInAt(0, Location::RequiresRegister()); 2470 locations->SetInAt(1, Location::RequiresRegister()); 2471 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 2472 break; 2473 } 2474 case Primitive::kPrimFloat: 2475 case Primitive::kPrimDouble: { 
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresRegister());
      break;
    }
    default:
      LOG(FATAL) << "Unexpected type for compare operation " << compare->InputAt(0)->GetType();
  }
}

// Emits the three-way compare. The switch leaves the condition flags set so
// that the shared tail below can pick -1/0/1; keep the flag-setting and
// LoadImmediate ordering exactly as written.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Shared tail: EQ -> 0 (already loaded), CC -> -1, otherwise +1.
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}

// Phis are resolved by the register allocator; they never emit code.
void LocationsBuilderARM::VisitPhi(HPhi* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) {
    locations->SetInAt(i, Location::Any());
  }
  locations->SetOut(Location::Any());
}

void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) {
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

// HInstanceFieldSet: object in a register, value in a register, plus two
// temps when a GC write barrier is required for the stored reference.
void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(instruction->GetFieldType(), instruction->GetValue());
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers for the write barrier.
2557 if (needs_write_barrier) { 2558 locations->AddTemp(Location::RequiresRegister()); 2559 locations->AddTemp(Location::RequiresRegister()); 2560 } 2561} 2562 2563void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) { 2564 LocationSummary* locations = instruction->GetLocations(); 2565 Register obj = locations->InAt(0).AsRegister<Register>(); 2566 uint32_t offset = instruction->GetFieldOffset().Uint32Value(); 2567 Primitive::Type field_type = instruction->GetFieldType(); 2568 2569 switch (field_type) { 2570 case Primitive::kPrimBoolean: 2571 case Primitive::kPrimByte: { 2572 Register value = locations->InAt(1).AsRegister<Register>(); 2573 __ StoreToOffset(kStoreByte, value, obj, offset); 2574 break; 2575 } 2576 2577 case Primitive::kPrimShort: 2578 case Primitive::kPrimChar: { 2579 Register value = locations->InAt(1).AsRegister<Register>(); 2580 __ StoreToOffset(kStoreHalfword, value, obj, offset); 2581 break; 2582 } 2583 2584 case Primitive::kPrimInt: 2585 case Primitive::kPrimNot: { 2586 Register value = locations->InAt(1).AsRegister<Register>(); 2587 __ StoreToOffset(kStoreWord, value, obj, offset); 2588 if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->GetValue())) { 2589 Register temp = locations->GetTemp(0).AsRegister<Register>(); 2590 Register card = locations->GetTemp(1).AsRegister<Register>(); 2591 codegen_->MarkGCCard(temp, card, obj, value); 2592 } 2593 break; 2594 } 2595 2596 case Primitive::kPrimLong: { 2597 Location value = locations->InAt(1); 2598 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset); 2599 break; 2600 } 2601 2602 case Primitive::kPrimFloat: { 2603 SRegister value = locations->InAt(1).AsFpuRegister<SRegister>(); 2604 __ StoreSToOffset(value, obj, offset); 2605 break; 2606 } 2607 2608 case Primitive::kPrimDouble: { 2609 DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>()); 2610 __ StoreDToOffset(value, obj, offset); 2611 
break; 2612 } 2613 2614 case Primitive::kPrimVoid: 2615 LOG(FATAL) << "Unreachable type " << field_type; 2616 UNREACHABLE(); 2617 } 2618} 2619 2620void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) { 2621 LocationSummary* locations = 2622 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 2623 locations->SetInAt(0, Location::RequiresRegister()); 2624 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 2625} 2626 2627void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) { 2628 LocationSummary* locations = instruction->GetLocations(); 2629 Register obj = locations->InAt(0).AsRegister<Register>(); 2630 uint32_t offset = instruction->GetFieldOffset().Uint32Value(); 2631 2632 switch (instruction->GetType()) { 2633 case Primitive::kPrimBoolean: { 2634 Register out = locations->Out().AsRegister<Register>(); 2635 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset); 2636 break; 2637 } 2638 2639 case Primitive::kPrimByte: { 2640 Register out = locations->Out().AsRegister<Register>(); 2641 __ LoadFromOffset(kLoadSignedByte, out, obj, offset); 2642 break; 2643 } 2644 2645 case Primitive::kPrimShort: { 2646 Register out = locations->Out().AsRegister<Register>(); 2647 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset); 2648 break; 2649 } 2650 2651 case Primitive::kPrimChar: { 2652 Register out = locations->Out().AsRegister<Register>(); 2653 __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset); 2654 break; 2655 } 2656 2657 case Primitive::kPrimInt: 2658 case Primitive::kPrimNot: { 2659 Register out = locations->Out().AsRegister<Register>(); 2660 __ LoadFromOffset(kLoadWord, out, obj, offset); 2661 break; 2662 } 2663 2664 case Primitive::kPrimLong: { 2665 // TODO: support volatile. 
      Location out = locations->Out();
      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      break;
    }

    case Primitive::kPrimFloat: {
      SRegister out = locations->Out().AsFpuRegister<SRegister>();
      __ LoadSFromOffset(out, obj, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
      __ LoadDFromOffset(out, obj, offset);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}

void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // When the check has uses, its result lives in the same register as its input.
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

// Compares the object against null and branches to the null-check slow path
// when it is null. A constant input can only be the literal null reference.
void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction);
  codegen_->AddSlowPath(slow_path);

  LocationSummary* locations = instruction->GetLocations();
  Location obj = locations->InAt(0);

  if (obj.IsRegister()) {
    __ cmp(obj.AsRegister<Register>(), ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
  } else {
    // Constant input: must be null, so the check always fails.
    DCHECK(obj.IsConstant()) << obj;
    DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0);
    __ b(slow_path->GetEntryLabel());
  }
}

void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // A constant index can be folded into the load offset.
  locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// Emits an array element load. Constant indexes are folded into the load
// offset; otherwise IP holds the scaled element base address.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}

// Reference stores that need a type check go through the runtime calling
// convention; all other stores take plain registers.
void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool needs_runtime_call = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
  if (needs_runtime_call) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    locations->SetInAt(2, Location::RequiresRegister());

    if (needs_write_barrier) {
      // Temporary registers for the write barrier.
      locations->AddTemp(Location::RequiresRegister());
      locations->AddTemp(Location::RequiresRegister());
    }
  }
}

// Emits an array element store. Reference stores that need a type check are
// delegated to the pAputObject runtime entrypoint; plain reference stores
// mark the GC card inline.
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();
  bool needs_runtime_call = locations->WillCall();
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      if (!needs_runtime_call) {
        uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
        Register value = locations->InAt(2).AsRegister<Register>();
        if (index.IsConstant()) {
          size_t offset =
              (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
          __ StoreToOffset(kStoreWord, value, obj, offset);
        } else {
          DCHECK(index.IsRegister()) << index;
          __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
          __ StoreToOffset(kStoreWord, value, IP, data_offset);
        }
        if (needs_write_barrier) {
          DCHECK_EQ(value_type, Primitive::kPrimNot);
          Register temp = locations->GetTemp(0).AsRegister<Register>();
          Register card = locations->GetTemp(1).AsRegister<Register>();
          codegen_->MarkGCCard(temp, card, obj, value);
        }
      } else {
        DCHECK_EQ(value_type, Primitive::kPrimNot);
        // The store needs a type check: let the runtime perform it (and the
        // write barrier).
        codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject),
                                instruction,
                                instruction->GetDexPc());
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}

void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary*
  locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// Loads the array length word from its fixed offset in the array header.
void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
}

void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

// Branches to the bounds-check slow path when index >= length. The unsigned
// CS (carry set) condition also catches negative indexes.
void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  Register index = locations->InAt(0).AsRegister<Register>();
  Register length = locations->InAt(1).AsRegister<Register>();

  __ cmp(index, ShifterOperand(length));
  __ b(slow_path->GetEntryLabel(), CS);
}

// Marks the card table entry covering `object` when `value` is non-null:
// loads the thread-local card table base, shifts the object address by the
// card shift, and stores the card value at base + (object >> shift).
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}

void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  // Parallel moves are inserted after register allocation and never reach the
  // locations builder.
  UNUSED(instruction);
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

// Tests the thread flags halfword and jumps to the suspend slow path when any
// flag is set. With a successor, the fast path branches there directly.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}

ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}

// Emits one pending parallel move: register/stack-slot/int-constant sources
// into register or stack-slot destinations, using IP as scratch for
// memory-to-memory moves.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else {
    DCHECK(source.IsConstant());
    DCHECK(source.GetConstant()->IsIntConstant());
    int32_t value = source.GetConstant()->AsIntConstant()->GetValue();
    if (destination.IsRegister()) {
      __ LoadImmediate(destination.AsRegister<Register>(), value);
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadImmediate(IP, value);
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}

// Swaps a register with a stack slot, parking the register value in IP.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}

// Swaps two stack slots using IP plus one scratch core register. If the
// scratch register had to be spilled (pushed), both offsets shift by a word.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}

// Emits one pending swap; only register/stack-slot combinations are handled.
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    // IP is the swap scratch register, so neither operand may live in it.
    DCHECK_NE(source.AsRegister<Register>(), IP);
    DCHECK_NE(destination.AsRegister<Register>(), IP);
    __ Mov(IP, source.AsRegister<Register>());
    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
    __ Mov(destination.AsRegister<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}

void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}

void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
      ?
        LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  locations->SetOut(Location::RequiresRegister());
}

// Loads a class reference. The referrer's own class is read straight off the
// current method; any other class goes through the dex cache, with a slow
// path for resolution and (when required) initialization.
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    DCHECK(cls->CanCallRuntime());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null dex-cache entry means the class is unresolved: take the slow path.
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<Register>());
}

// Jumps to the slow path unless the class status is at least kStatusInitialized.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// Emits the load for a static field read; in(0) holds the declaring class.
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).AsRegister<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      Register out = locations->Out().AsRegister<Register>();
      __ LoadFromOffset(kLoadUnsignedByte, out, cls, offset);
      break;
    }

    case Primitive::kPrimByte: {
      Register out = locations->Out().AsRegister<Register>();
      __ LoadFromOffset(kLoadSignedByte, out, cls, offset);
      break;
    }

    case Primitive::kPrimShort: {
      Register out = locations->Out().AsRegister<Register>();
      __ LoadFromOffset(kLoadSignedHalfword, out, cls, offset);
      break;
    }

    case Primitive::kPrimChar: {
      Register out = locations->Out().AsRegister<Register>();
      __ LoadFromOffset(kLoadUnsignedHalfword, out, cls, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().AsRegister<Register>();
      __ LoadFromOffset(kLoadWord, out, cls, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      Location out = locations->Out();
      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), cls, offset);
      break;
    }

    case Primitive::kPrimFloat: {
      SRegister out = locations->Out().AsFpuRegister<SRegister>();
      __ LoadSFromOffset(out, cls, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
      __ LoadDFromOffset(out, cls, offset);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}

void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(instruction->GetFieldType(), instruction->GetValue());
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers for the write barrier.
  if (needs_write_barrier) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}

// Emits the store for a static field write, marking the GC card after
// reference stores.
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).AsRegister<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      Register value = locations->InAt(1).AsRegister<Register>();
      __ StoreToOffset(kStoreByte, value, cls, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      Register value = locations->InAt(1).AsRegister<Register>();
      __ StoreToOffset(kStoreHalfword, value, cls, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).AsRegister<Register>();
      __ StoreToOffset(kStoreWord, value, cls, offset);
      if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->GetValue())) {
        Register temp = locations->GetTemp(0).AsRegister<Register>();
        Register card = locations->GetTemp(1).AsRegister<Register>();
        codegen_->MarkGCCard(temp, card, cls, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Location value = locations->InAt(1);
      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), cls, offset);
      break;
    }

    case Primitive::kPrimFloat: {
      SRegister value = locations->InAt(1).AsFpuRegister<SRegister>();
      __ StoreSToOffset(value, cls, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
      __ StoreDToOffset(value, cls, offset);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}

void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
  locations->SetOut(Location::RequiresRegister());
}

// Loads a string reference from the declaring class's dex-cache strings
// array, with a slow path taken when the cache entry is still null.
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

// Reads the pending exception from the thread, then clears the thread slot.
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}

void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention
      calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

// Throwing is delegated entirely to the pDeliverException runtime entrypoint.
void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
}

void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
  // A final class needs only a class-pointer comparison; otherwise a slow
  // path performs the full check.
  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
      ? LocationSummary::kNoCall
      : LocationSummary::kCallOnSlowPath;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

// Materializes `obj instanceof cls` as 0/1 in the output register.
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}

void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->AddTemp(Location::RequiresRegister());
}

// Checks that `obj` (when non-null) is an instance of `cls`; the slow path
// performs the full type check when the class pointers differ.
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // A null object passes the cast: jump straight to the exit.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

// monitorenter/monitorexit are delegated to the lock/unlock runtime entrypoints.
void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
      instruction,
      instruction->GetDexPc());
}

void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }

// Shared locations for and/or/xor; valid for int and long results only.
void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
         || instruction->GetResultType() == Primitive::kPrimLong);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  bool output_overlaps = (instruction->GetResultType() == Primitive::kPrimLong);
  locations->SetOut(Location::RequiresRegister(), output_overlaps);
}

void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}
void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) {
  HandleBitwiseOperation(instruction);
}

// Common code generation for And/Or/Xor.
// Int: a single and_/orr/eor instruction.
// Long: the low and high words are combined independently — bitwise
// operations have no cross-word carries, so each half is a plain 32-bit op
// on the corresponding registers of the input/output pairs. The low half of
// the output is written before the high halves of the inputs are read, which
// is safe because the builder requests a non-aliasing (overlapping) output
// for long results.
void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations = instruction->GetLocations();

  if (instruction->GetResultType() == Primitive::kPrimInt) {
    Register first = locations->InAt(0).AsRegister<Register>();
    Register second = locations->InAt(1).AsRegister<Register>();
    Register out = locations->Out().AsRegister<Register>();
    if (instruction->IsAnd()) {
      __ and_(out, first, ShifterOperand(second));
    } else if (instruction->IsOr()) {
      __ orr(out, first, ShifterOperand(second));
    } else {
      DCHECK(instruction->IsXor());
      __ eor(out, first, ShifterOperand(second));
    }
  } else {
    DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong);
    Location first = locations->InAt(0);
    Location second = locations->InAt(1);
    Location out = locations->Out();
    if (instruction->IsAnd()) {
      __ and_(out.AsRegisterPairLow<Register>(),
              first.AsRegisterPairLow<Register>(),
              ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ and_(out.AsRegisterPairHigh<Register>(),
              first.AsRegisterPairHigh<Register>(),
              ShifterOperand(second.AsRegisterPairHigh<Register>()));
    } else if (instruction->IsOr()) {
      __ orr(out.AsRegisterPairLow<Register>(),
             first.AsRegisterPairLow<Register>(),
             ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ orr(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
    } else {
      DCHECK(instruction->IsXor());
      __ eor(out.AsRegisterPairLow<Register>(),
             first.AsRegisterPairLow<Register>(),
             ShifterOperand(second.AsRegisterPairLow<Register>()));
      __ eor(out.AsRegisterPairHigh<Register>(),
             first.AsRegisterPairHigh<Register>(),
             ShifterOperand(second.AsRegisterPairHigh<Register>()));
    }
  }
}

}  // namespace arm
}  // namespace art