code_generator_arm.cc revision e53798a7e3267305f696bf658e418c92e63e0834
1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#include "code_generator_arm.h" 18 19#include "entrypoints/quick/quick_entrypoints.h" 20#include "gc/accounting/card_table.h" 21#include "mirror/array-inl.h" 22#include "mirror/art_method.h" 23#include "mirror/class.h" 24#include "thread.h" 25#include "utils/arm/assembler_arm.h" 26#include "utils/arm/managed_register_arm.h" 27#include "utils/assembler.h" 28#include "utils/stack_checks.h" 29 30namespace art { 31 32namespace arm { 33 34static DRegister FromLowSToD(SRegister reg) { 35 DCHECK_EQ(reg % 2, 0); 36 return static_cast<DRegister>(reg / 2); 37} 38 39static constexpr bool kExplicitStackOverflowCheck = false; 40 41static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2; // LR, R6, R7 42static constexpr int kCurrentMethodStackOffset = 0; 43 44static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2, R3 }; 45static constexpr size_t kRuntimeParameterCoreRegistersLength = 46 arraysize(kRuntimeParameterCoreRegisters); 47static constexpr SRegister kRuntimeParameterFpuRegisters[] = { S0, S1, S2, S3 }; 48static constexpr size_t kRuntimeParameterFpuRegistersLength = 49 arraysize(kRuntimeParameterFpuRegisters); 50 51class InvokeRuntimeCallingConvention : public CallingConvention<Register, SRegister> { 52 public: 53 InvokeRuntimeCallingConvention() 54 : CallingConvention(kRuntimeParameterCoreRegisters, 55 
kRuntimeParameterCoreRegistersLength, 56 kRuntimeParameterFpuRegisters, 57 kRuntimeParameterFpuRegistersLength) {} 58 59 private: 60 DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention); 61}; 62 63#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())-> 64#define QUICK_ENTRY_POINT(x) QUICK_ENTRYPOINT_OFFSET(kArmWordSize, x).Int32Value() 65 66class SlowPathCodeARM : public SlowPathCode { 67 public: 68 SlowPathCodeARM() : entry_label_(), exit_label_() {} 69 70 Label* GetEntryLabel() { return &entry_label_; } 71 Label* GetExitLabel() { return &exit_label_; } 72 73 private: 74 Label entry_label_; 75 Label exit_label_; 76 77 DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM); 78}; 79 80class NullCheckSlowPathARM : public SlowPathCodeARM { 81 public: 82 explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {} 83 84 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 85 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen); 86 __ Bind(GetEntryLabel()); 87 arm_codegen->InvokeRuntime( 88 QUICK_ENTRY_POINT(pThrowNullPointer), instruction_, instruction_->GetDexPc()); 89 } 90 91 private: 92 HNullCheck* const instruction_; 93 DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM); 94}; 95 96class DivZeroCheckSlowPathARM : public SlowPathCodeARM { 97 public: 98 explicit DivZeroCheckSlowPathARM(HDivZeroCheck* instruction) : instruction_(instruction) {} 99 100 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 101 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen); 102 __ Bind(GetEntryLabel()); 103 arm_codegen->InvokeRuntime( 104 QUICK_ENTRY_POINT(pThrowDivZero), instruction_, instruction_->GetDexPc()); 105 } 106 107 private: 108 HDivZeroCheck* const instruction_; 109 DISALLOW_COPY_AND_ASSIGN(DivZeroCheckSlowPathARM); 110}; 111 112class StackOverflowCheckSlowPathARM : public SlowPathCodeARM { 113 public: 114 StackOverflowCheckSlowPathARM() {} 115 116 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 117 __ 
Bind(GetEntryLabel()); 118 __ LoadFromOffset(kLoadWord, PC, TR, 119 QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value()); 120 } 121 122 private: 123 DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM); 124}; 125 126class SuspendCheckSlowPathARM : public SlowPathCodeARM { 127 public: 128 SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor) 129 : instruction_(instruction), successor_(successor) {} 130 131 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 132 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen); 133 __ Bind(GetEntryLabel()); 134 codegen->SaveLiveRegisters(instruction_->GetLocations()); 135 arm_codegen->InvokeRuntime( 136 QUICK_ENTRY_POINT(pTestSuspend), instruction_, instruction_->GetDexPc()); 137 codegen->RestoreLiveRegisters(instruction_->GetLocations()); 138 if (successor_ == nullptr) { 139 __ b(GetReturnLabel()); 140 } else { 141 __ b(arm_codegen->GetLabelOf(successor_)); 142 } 143 } 144 145 Label* GetReturnLabel() { 146 DCHECK(successor_ == nullptr); 147 return &return_label_; 148 } 149 150 private: 151 HSuspendCheck* const instruction_; 152 // If not null, the block to branch to after the suspend check. 153 HBasicBlock* const successor_; 154 155 // If `successor_` is null, the label to branch to after the suspend check. 156 Label return_label_; 157 158 DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM); 159}; 160 161class BoundsCheckSlowPathARM : public SlowPathCodeARM { 162 public: 163 BoundsCheckSlowPathARM(HBoundsCheck* instruction, 164 Location index_location, 165 Location length_location) 166 : instruction_(instruction), 167 index_location_(index_location), 168 length_location_(length_location) {} 169 170 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 171 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen); 172 __ Bind(GetEntryLabel()); 173 // We're moving two locations to locations that could overlap, so we need a parallel 174 // move resolver. 
175 InvokeRuntimeCallingConvention calling_convention; 176 codegen->EmitParallelMoves( 177 index_location_, 178 Location::RegisterLocation(calling_convention.GetRegisterAt(0)), 179 length_location_, 180 Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 181 arm_codegen->InvokeRuntime( 182 QUICK_ENTRY_POINT(pThrowArrayBounds), instruction_, instruction_->GetDexPc()); 183 } 184 185 private: 186 HBoundsCheck* const instruction_; 187 const Location index_location_; 188 const Location length_location_; 189 190 DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM); 191}; 192 193class LoadClassSlowPathARM : public SlowPathCodeARM { 194 public: 195 LoadClassSlowPathARM(HLoadClass* cls, 196 HInstruction* at, 197 uint32_t dex_pc, 198 bool do_clinit) 199 : cls_(cls), at_(at), dex_pc_(dex_pc), do_clinit_(do_clinit) { 200 DCHECK(at->IsLoadClass() || at->IsClinitCheck()); 201 } 202 203 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 204 LocationSummary* locations = at_->GetLocations(); 205 206 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen); 207 __ Bind(GetEntryLabel()); 208 codegen->SaveLiveRegisters(locations); 209 210 InvokeRuntimeCallingConvention calling_convention; 211 __ LoadImmediate(calling_convention.GetRegisterAt(0), cls_->GetTypeIndex()); 212 arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(1)); 213 int32_t entry_point_offset = do_clinit_ 214 ? QUICK_ENTRY_POINT(pInitializeStaticStorage) 215 : QUICK_ENTRY_POINT(pInitializeType); 216 arm_codegen->InvokeRuntime(entry_point_offset, at_, dex_pc_); 217 218 // Move the class to the desired location. 219 Location out = locations->Out(); 220 if (out.IsValid()) { 221 DCHECK(out.IsRegister() && !locations->GetLiveRegisters()->ContainsCoreRegister(out.reg())); 222 arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0)); 223 } 224 codegen->RestoreLiveRegisters(locations); 225 __ b(GetExitLabel()); 226 } 227 228 private: 229 // The class this slow path will load. 
230 HLoadClass* const cls_; 231 232 // The instruction where this slow path is happening. 233 // (Might be the load class or an initialization check). 234 HInstruction* const at_; 235 236 // The dex PC of `at_`. 237 const uint32_t dex_pc_; 238 239 // Whether to initialize the class. 240 const bool do_clinit_; 241 242 DISALLOW_COPY_AND_ASSIGN(LoadClassSlowPathARM); 243}; 244 245class LoadStringSlowPathARM : public SlowPathCodeARM { 246 public: 247 explicit LoadStringSlowPathARM(HLoadString* instruction) : instruction_(instruction) {} 248 249 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 250 LocationSummary* locations = instruction_->GetLocations(); 251 DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg())); 252 253 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen); 254 __ Bind(GetEntryLabel()); 255 codegen->SaveLiveRegisters(locations); 256 257 InvokeRuntimeCallingConvention calling_convention; 258 arm_codegen->LoadCurrentMethod(calling_convention.GetRegisterAt(0)); 259 __ LoadImmediate(calling_convention.GetRegisterAt(1), instruction_->GetStringIndex()); 260 arm_codegen->InvokeRuntime( 261 QUICK_ENTRY_POINT(pResolveString), instruction_, instruction_->GetDexPc()); 262 arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0)); 263 264 codegen->RestoreLiveRegisters(locations); 265 __ b(GetExitLabel()); 266 } 267 268 private: 269 HLoadString* const instruction_; 270 271 DISALLOW_COPY_AND_ASSIGN(LoadStringSlowPathARM); 272}; 273 274class TypeCheckSlowPathARM : public SlowPathCodeARM { 275 public: 276 TypeCheckSlowPathARM(HInstruction* instruction, 277 Location class_to_check, 278 Location object_class, 279 uint32_t dex_pc) 280 : instruction_(instruction), 281 class_to_check_(class_to_check), 282 object_class_(object_class), 283 dex_pc_(dex_pc) {} 284 285 void EmitNativeCode(CodeGenerator* codegen) OVERRIDE { 286 LocationSummary* locations = instruction_->GetLocations(); 287 
DCHECK(instruction_->IsCheckCast() 288 || !locations->GetLiveRegisters()->ContainsCoreRegister(locations->Out().reg())); 289 290 CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen); 291 __ Bind(GetEntryLabel()); 292 codegen->SaveLiveRegisters(locations); 293 294 // We're moving two locations to locations that could overlap, so we need a parallel 295 // move resolver. 296 InvokeRuntimeCallingConvention calling_convention; 297 codegen->EmitParallelMoves( 298 class_to_check_, 299 Location::RegisterLocation(calling_convention.GetRegisterAt(0)), 300 object_class_, 301 Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 302 303 if (instruction_->IsInstanceOf()) { 304 arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pInstanceofNonTrivial), instruction_, dex_pc_); 305 arm_codegen->Move32(locations->Out(), Location::RegisterLocation(R0)); 306 } else { 307 DCHECK(instruction_->IsCheckCast()); 308 arm_codegen->InvokeRuntime(QUICK_ENTRY_POINT(pCheckCast), instruction_, dex_pc_); 309 } 310 311 codegen->RestoreLiveRegisters(locations); 312 __ b(GetExitLabel()); 313 } 314 315 private: 316 HInstruction* const instruction_; 317 const Location class_to_check_; 318 const Location object_class_; 319 uint32_t dex_pc_; 320 321 DISALLOW_COPY_AND_ASSIGN(TypeCheckSlowPathARM); 322}; 323 324#undef __ 325 326#undef __ 327#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())-> 328 329inline Condition ARMCondition(IfCondition cond) { 330 switch (cond) { 331 case kCondEQ: return EQ; 332 case kCondNE: return NE; 333 case kCondLT: return LT; 334 case kCondLE: return LE; 335 case kCondGT: return GT; 336 case kCondGE: return GE; 337 default: 338 LOG(FATAL) << "Unknown if condition"; 339 } 340 return EQ; // Unreachable. 
341} 342 343inline Condition ARMOppositeCondition(IfCondition cond) { 344 switch (cond) { 345 case kCondEQ: return NE; 346 case kCondNE: return EQ; 347 case kCondLT: return GE; 348 case kCondLE: return GT; 349 case kCondGT: return LE; 350 case kCondGE: return LT; 351 default: 352 LOG(FATAL) << "Unknown if condition"; 353 } 354 return EQ; // Unreachable. 355} 356 357void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const { 358 stream << ArmManagedRegister::FromCoreRegister(Register(reg)); 359} 360 361void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const { 362 stream << ArmManagedRegister::FromSRegister(SRegister(reg)); 363} 364 365size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) { 366 __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index); 367 return kArmWordSize; 368} 369 370size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) { 371 __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index); 372 return kArmWordSize; 373} 374 375CodeGeneratorARM::CodeGeneratorARM(HGraph* graph) 376 : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfSRegisters, kNumberOfRegisterPairs), 377 block_labels_(graph->GetArena(), 0), 378 location_builder_(graph, this), 379 instruction_visitor_(graph, this), 380 move_resolver_(graph->GetArena(), this), 381 assembler_(true) {} 382 383size_t CodeGeneratorARM::FrameEntrySpillSize() const { 384 return kNumberOfPushedRegistersAtEntry * kArmWordSize; 385} 386 387Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const { 388 switch (type) { 389 case Primitive::kPrimLong: { 390 size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs); 391 ArmManagedRegister pair = 392 ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg)); 393 DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]); 394 
DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]); 395 396 blocked_core_registers_[pair.AsRegisterPairLow()] = true; 397 blocked_core_registers_[pair.AsRegisterPairHigh()] = true; 398 UpdateBlockedPairRegisters(); 399 return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh()); 400 } 401 402 case Primitive::kPrimByte: 403 case Primitive::kPrimBoolean: 404 case Primitive::kPrimChar: 405 case Primitive::kPrimShort: 406 case Primitive::kPrimInt: 407 case Primitive::kPrimNot: { 408 int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters); 409 // Block all register pairs that contain `reg`. 410 for (int i = 0; i < kNumberOfRegisterPairs; i++) { 411 ArmManagedRegister current = 412 ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i)); 413 if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) { 414 blocked_register_pairs_[i] = true; 415 } 416 } 417 return Location::RegisterLocation(reg); 418 } 419 420 case Primitive::kPrimFloat: { 421 int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfSRegisters); 422 return Location::FpuRegisterLocation(reg); 423 } 424 425 case Primitive::kPrimDouble: { 426 int reg = FindTwoFreeConsecutiveAlignedEntries(blocked_fpu_registers_, kNumberOfSRegisters); 427 DCHECK_EQ(reg % 2, 0); 428 return Location::FpuRegisterPairLocation(reg, reg + 1); 429 } 430 431 case Primitive::kPrimVoid: 432 LOG(FATAL) << "Unreachable type " << type; 433 } 434 435 return Location(); 436} 437 438void CodeGeneratorARM::SetupBlockedRegisters() const { 439 // Don't allocate the dalvik style register pair passing. 440 blocked_register_pairs_[R1_R2] = true; 441 442 // Stack register, LR and PC are always reserved. 443 blocked_core_registers_[SP] = true; 444 blocked_core_registers_[LR] = true; 445 blocked_core_registers_[PC] = true; 446 447 // Reserve thread register. 448 blocked_core_registers_[TR] = true; 449 450 // Reserve temp register. 
451 blocked_core_registers_[IP] = true; 452 453 // TODO: We currently don't use Quick's callee saved registers. 454 // We always save and restore R6 and R7 to make sure we can use three 455 // register pairs for long operations. 456 blocked_core_registers_[R4] = true; 457 blocked_core_registers_[R5] = true; 458 blocked_core_registers_[R8] = true; 459 blocked_core_registers_[R10] = true; 460 blocked_core_registers_[R11] = true; 461 462 blocked_fpu_registers_[S16] = true; 463 blocked_fpu_registers_[S17] = true; 464 blocked_fpu_registers_[S18] = true; 465 blocked_fpu_registers_[S19] = true; 466 blocked_fpu_registers_[S20] = true; 467 blocked_fpu_registers_[S21] = true; 468 blocked_fpu_registers_[S22] = true; 469 blocked_fpu_registers_[S23] = true; 470 blocked_fpu_registers_[S24] = true; 471 blocked_fpu_registers_[S25] = true; 472 blocked_fpu_registers_[S26] = true; 473 blocked_fpu_registers_[S27] = true; 474 blocked_fpu_registers_[S28] = true; 475 blocked_fpu_registers_[S29] = true; 476 blocked_fpu_registers_[S30] = true; 477 blocked_fpu_registers_[S31] = true; 478 479 UpdateBlockedPairRegisters(); 480} 481 482void CodeGeneratorARM::UpdateBlockedPairRegisters() const { 483 for (int i = 0; i < kNumberOfRegisterPairs; i++) { 484 ArmManagedRegister current = 485 ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i)); 486 if (blocked_core_registers_[current.AsRegisterPairLow()] 487 || blocked_core_registers_[current.AsRegisterPairHigh()]) { 488 blocked_register_pairs_[i] = true; 489 } 490 } 491} 492 493InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen) 494 : HGraphVisitor(graph), 495 assembler_(codegen->GetAssembler()), 496 codegen_(codegen) {} 497 498void CodeGeneratorARM::GenerateFrameEntry() { 499 bool skip_overflow_check = 500 IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm); 501 if (!skip_overflow_check) { 502 if (kExplicitStackOverflowCheck) { 503 SlowPathCodeARM* slow_path = 
new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM(); 504 AddSlowPath(slow_path); 505 506 __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value()); 507 __ cmp(SP, ShifterOperand(IP)); 508 __ b(slow_path->GetEntryLabel(), CC); 509 } else { 510 __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm))); 511 __ LoadFromOffset(kLoadWord, IP, IP, 0); 512 RecordPcInfo(nullptr, 0); 513 } 514 } 515 516 core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7); 517 __ PushList(1 << LR | 1 << R6 | 1 << R7); 518 519 // The return PC has already been pushed on the stack. 520 __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize)); 521 __ StoreToOffset(kStoreWord, R0, SP, 0); 522} 523 524void CodeGeneratorARM::GenerateFrameExit() { 525 __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize); 526 __ PopList(1 << PC | 1 << R6 | 1 << R7); 527} 528 529void CodeGeneratorARM::Bind(HBasicBlock* block) { 530 __ Bind(GetLabelOf(block)); 531} 532 533Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const { 534 switch (load->GetType()) { 535 case Primitive::kPrimLong: 536 case Primitive::kPrimDouble: 537 return Location::DoubleStackSlot(GetStackSlot(load->GetLocal())); 538 break; 539 540 case Primitive::kPrimInt: 541 case Primitive::kPrimNot: 542 case Primitive::kPrimFloat: 543 return Location::StackSlot(GetStackSlot(load->GetLocal())); 544 545 case Primitive::kPrimBoolean: 546 case Primitive::kPrimByte: 547 case Primitive::kPrimChar: 548 case Primitive::kPrimShort: 549 case Primitive::kPrimVoid: 550 LOG(FATAL) << "Unexpected type " << load->GetType(); 551 } 552 553 LOG(FATAL) << "Unreachable"; 554 return Location(); 555} 556 557Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) { 558 switch (type) { 559 case Primitive::kPrimBoolean: 560 case Primitive::kPrimByte: 561 case Primitive::kPrimChar: 562 case Primitive::kPrimShort: 563 
case Primitive::kPrimInt: 564 case Primitive::kPrimNot: { 565 uint32_t index = gp_index_++; 566 uint32_t stack_index = stack_index_++; 567 if (index < calling_convention.GetNumberOfRegisters()) { 568 return Location::RegisterLocation(calling_convention.GetRegisterAt(index)); 569 } else { 570 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index)); 571 } 572 } 573 574 case Primitive::kPrimLong: { 575 uint32_t index = gp_index_; 576 uint32_t stack_index = stack_index_; 577 gp_index_ += 2; 578 stack_index_ += 2; 579 if (index + 1 < calling_convention.GetNumberOfRegisters()) { 580 ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair( 581 calling_convention.GetRegisterPairAt(index)); 582 return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh()); 583 } else if (index + 1 == calling_convention.GetNumberOfRegisters()) { 584 return Location::QuickParameter(index, stack_index); 585 } else { 586 return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index)); 587 } 588 } 589 590 case Primitive::kPrimFloat: { 591 uint32_t stack_index = stack_index_++; 592 if (float_index_ % 2 == 0) { 593 float_index_ = std::max(double_index_, float_index_); 594 } 595 if (float_index_ < calling_convention.GetNumberOfFpuRegisters()) { 596 return Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(float_index_++)); 597 } else { 598 return Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index)); 599 } 600 } 601 602 case Primitive::kPrimDouble: { 603 double_index_ = std::max(double_index_, RoundUp(float_index_, 2)); 604 uint32_t stack_index = stack_index_; 605 stack_index_ += 2; 606 if (double_index_ + 1 < calling_convention.GetNumberOfFpuRegisters()) { 607 uint32_t index = double_index_; 608 double_index_ += 2; 609 return Location::FpuRegisterPairLocation( 610 calling_convention.GetFpuRegisterAt(index), 611 calling_convention.GetFpuRegisterAt(index + 1)); 612 } else { 613 return 
Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(stack_index)); 614 } 615 } 616 617 case Primitive::kPrimVoid: 618 LOG(FATAL) << "Unexpected parameter type " << type; 619 break; 620 } 621 return Location(); 622} 623 624Location InvokeDexCallingConventionVisitor::GetReturnLocation(Primitive::Type type) { 625 switch (type) { 626 case Primitive::kPrimBoolean: 627 case Primitive::kPrimByte: 628 case Primitive::kPrimChar: 629 case Primitive::kPrimShort: 630 case Primitive::kPrimInt: 631 case Primitive::kPrimNot: { 632 return Location::RegisterLocation(R0); 633 } 634 635 case Primitive::kPrimFloat: { 636 return Location::FpuRegisterLocation(S0); 637 } 638 639 case Primitive::kPrimLong: { 640 return Location::RegisterPairLocation(R0, R1); 641 } 642 643 case Primitive::kPrimDouble: { 644 return Location::FpuRegisterPairLocation(S0, S1); 645 } 646 647 case Primitive::kPrimVoid: 648 return Location(); 649 } 650 UNREACHABLE(); 651 return Location(); 652} 653 654void CodeGeneratorARM::Move32(Location destination, Location source) { 655 if (source.Equals(destination)) { 656 return; 657 } 658 if (destination.IsRegister()) { 659 if (source.IsRegister()) { 660 __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>()); 661 } else if (source.IsFpuRegister()) { 662 __ vmovrs(destination.AsRegister<Register>(), source.AsFpuRegister<SRegister>()); 663 } else { 664 __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(), SP, source.GetStackIndex()); 665 } 666 } else if (destination.IsFpuRegister()) { 667 if (source.IsRegister()) { 668 __ vmovsr(destination.AsFpuRegister<SRegister>(), source.AsRegister<Register>()); 669 } else if (source.IsFpuRegister()) { 670 __ vmovs(destination.AsFpuRegister<SRegister>(), source.AsFpuRegister<SRegister>()); 671 } else { 672 __ LoadSFromOffset(destination.AsFpuRegister<SRegister>(), SP, source.GetStackIndex()); 673 } 674 } else { 675 DCHECK(destination.IsStackSlot()) << destination; 676 if (source.IsRegister()) { 
677 __ StoreToOffset(kStoreWord, source.AsRegister<Register>(), SP, destination.GetStackIndex()); 678 } else if (source.IsFpuRegister()) { 679 __ StoreSToOffset(source.AsFpuRegister<SRegister>(), SP, destination.GetStackIndex()); 680 } else { 681 DCHECK(source.IsStackSlot()) << source; 682 __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex()); 683 __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex()); 684 } 685 } 686} 687 688void CodeGeneratorARM::Move64(Location destination, Location source) { 689 if (source.Equals(destination)) { 690 return; 691 } 692 if (destination.IsRegisterPair()) { 693 if (source.IsRegisterPair()) { 694 EmitParallelMoves( 695 Location::RegisterLocation(source.AsRegisterPairHigh<Register>()), 696 Location::RegisterLocation(destination.AsRegisterPairHigh<Register>()), 697 Location::RegisterLocation(source.AsRegisterPairLow<Register>()), 698 Location::RegisterLocation(destination.AsRegisterPairLow<Register>())); 699 } else if (source.IsFpuRegister()) { 700 UNIMPLEMENTED(FATAL); 701 } else if (source.IsQuickParameter()) { 702 uint16_t register_index = source.GetQuickParameterRegisterIndex(); 703 uint16_t stack_index = source.GetQuickParameterStackIndex(); 704 InvokeDexCallingConvention calling_convention; 705 EmitParallelMoves( 706 Location::RegisterLocation(calling_convention.GetRegisterAt(register_index)), 707 Location::RegisterLocation(destination.AsRegisterPairLow<Register>()), 708 Location::StackSlot( 709 calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize()), 710 Location::RegisterLocation(destination.AsRegisterPairHigh<Register>())); 711 } else { 712 // No conflict possible, so just do the moves. 
713 DCHECK(source.IsDoubleStackSlot()); 714 if (destination.AsRegisterPairLow<Register>() == R1) { 715 DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2); 716 __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex()); 717 __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize)); 718 } else { 719 __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(), 720 SP, source.GetStackIndex()); 721 } 722 } 723 } else if (destination.IsFpuRegisterPair()) { 724 if (source.IsDoubleStackSlot()) { 725 __ LoadDFromOffset(FromLowSToD(destination.AsFpuRegisterPairLow<SRegister>()), 726 SP, 727 source.GetStackIndex()); 728 } else { 729 UNIMPLEMENTED(FATAL); 730 } 731 } else if (destination.IsQuickParameter()) { 732 InvokeDexCallingConvention calling_convention; 733 uint16_t register_index = destination.GetQuickParameterRegisterIndex(); 734 uint16_t stack_index = destination.GetQuickParameterStackIndex(); 735 if (source.IsRegisterPair()) { 736 UNIMPLEMENTED(FATAL); 737 } else if (source.IsFpuRegister()) { 738 UNIMPLEMENTED(FATAL); 739 } else { 740 DCHECK(source.IsDoubleStackSlot()); 741 EmitParallelMoves( 742 Location::StackSlot(source.GetStackIndex()), 743 Location::RegisterLocation(calling_convention.GetRegisterAt(register_index)), 744 Location::StackSlot(source.GetHighStackIndex(kArmWordSize)), 745 Location::StackSlot(calling_convention.GetStackOffsetOf(stack_index + 1))); 746 } 747 } else { 748 DCHECK(destination.IsDoubleStackSlot()); 749 if (source.IsRegisterPair()) { 750 // No conflict possible, so just do the moves. 
751 if (source.AsRegisterPairLow<Register>() == R1) { 752 DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2); 753 __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex()); 754 __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize)); 755 } else { 756 __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(), 757 SP, destination.GetStackIndex()); 758 } 759 } else if (source.IsQuickParameter()) { 760 InvokeDexCallingConvention calling_convention; 761 uint16_t register_index = source.GetQuickParameterRegisterIndex(); 762 uint16_t stack_index = source.GetQuickParameterStackIndex(); 763 // Just move the low part. The only time a source is a quick parameter is 764 // when moving the parameter to its stack locations. And the (Java) caller 765 // of this method has already done that. 766 __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(register_index), 767 SP, destination.GetStackIndex()); 768 DCHECK_EQ(calling_convention.GetStackOffsetOf(stack_index + 1) + GetFrameSize(), 769 static_cast<size_t>(destination.GetHighStackIndex(kArmWordSize))); 770 } else if (source.IsFpuRegisterPair()) { 771 __ StoreDToOffset(FromLowSToD(source.AsFpuRegisterPairLow<SRegister>()), 772 SP, 773 destination.GetStackIndex()); 774 } else { 775 DCHECK(source.IsDoubleStackSlot()); 776 EmitParallelMoves( 777 Location::StackSlot(source.GetStackIndex()), 778 Location::StackSlot(destination.GetStackIndex()), 779 Location::StackSlot(source.GetHighStackIndex(kArmWordSize)), 780 Location::StackSlot(destination.GetHighStackIndex(kArmWordSize))); 781 } 782 } 783} 784 785void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) { 786 LocationSummary* locations = instruction->GetLocations(); 787 if (locations != nullptr && locations->Out().Equals(location)) { 788 return; 789 } 790 791 if (locations != nullptr && locations->Out().IsConstant()) { 792 HConstant* const_to_move = locations->Out().GetConstant(); 793 
if (const_to_move->IsIntConstant()) { 794 int32_t value = const_to_move->AsIntConstant()->GetValue(); 795 if (location.IsRegister()) { 796 __ LoadImmediate(location.AsRegister<Register>(), value); 797 } else { 798 DCHECK(location.IsStackSlot()); 799 __ LoadImmediate(IP, value); 800 __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex()); 801 } 802 } else if (const_to_move->IsLongConstant()) { 803 int64_t value = const_to_move->AsLongConstant()->GetValue(); 804 if (location.IsRegisterPair()) { 805 __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value)); 806 __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value)); 807 } else { 808 DCHECK(location.IsDoubleStackSlot()); 809 __ LoadImmediate(IP, Low32Bits(value)); 810 __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex()); 811 __ LoadImmediate(IP, High32Bits(value)); 812 __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize)); 813 } 814 } 815 } else if (instruction->IsLoadLocal()) { 816 uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal()); 817 switch (instruction->GetType()) { 818 case Primitive::kPrimBoolean: 819 case Primitive::kPrimByte: 820 case Primitive::kPrimChar: 821 case Primitive::kPrimShort: 822 case Primitive::kPrimInt: 823 case Primitive::kPrimNot: 824 case Primitive::kPrimFloat: 825 Move32(location, Location::StackSlot(stack_slot)); 826 break; 827 828 case Primitive::kPrimLong: 829 case Primitive::kPrimDouble: 830 Move64(location, Location::DoubleStackSlot(stack_slot)); 831 break; 832 833 default: 834 LOG(FATAL) << "Unexpected type " << instruction->GetType(); 835 } 836 } else if (instruction->IsTemporary()) { 837 Location temp_location = GetTemporaryLocation(instruction->AsTemporary()); 838 if (temp_location.IsStackSlot()) { 839 Move32(location, temp_location); 840 } else { 841 DCHECK(temp_location.IsDoubleStackSlot()); 842 Move64(location, temp_location); 843 } 844 } else { 845 
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    // Copy the instruction's result from its output location, choosing a
    // single-word or double-word move based on the value's type.
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}

// Calls a runtime entry point: loads the code pointer stored at
// `entry_point_offset` in the current Thread (register TR), branches to it
// through LR, and records a stack map entry for `instruction` at `dex_pc`.
void CodeGeneratorARM::InvokeRuntime(int32_t entry_point_offset,
                                     HInstruction* instruction,
                                     uint32_t dex_pc) {
  __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
  __ blx(LR);
  RecordPcInfo(instruction, dex_pc);
  // Only instruction kinds expected to call into the runtime (or a method
  // already known to be non-leaf) should reach this point.
  DCHECK(instruction->IsSuspendCheck()
      || instruction->IsBoundsCheck()
      || instruction->IsNullCheck()
      || instruction->IsDivZeroCheck()
      || instruction->GetLocations()->CanCall()
      || !IsLeafMethod());
}

// An unconditional branch needs no registers.
void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

// Generates an unconditional branch; on a loop back edge the branch is
// combined with the loop's suspend check.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    // NOTE(review): GenerateSuspendCheck is passed `successor`, so it
    // presumably emits the branch itself — confirm in its definition.
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  // In the entry block, a suspend check scheduled right before the goto is
  // generated here (with no successor to branch to).
  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  // No branch is needed when the successor is the next block in layout order.
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}

// HExit needs no registers.
void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}

// The exit block should never be executed; emit a breakpoint trap in debug
// builds to catch a fall-through.
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  UNUSED(exit);
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ bkpt(0);
  }
}

// An HIf only needs an input register when its condition has been
// materialized into a boolean value; otherwise the condition's own inputs
// are consumed directly at the branch.
void LocationsBuilderARM::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

// Generates the branches for an HIf. Three cases:
//   1) constant condition: emit at most one unconditional branch;
//   2) materialized condition: compare the boolean output against 0;
//   3) non-materialized condition: emit the compare here and branch on it.
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0.
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).AsRegister<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      Register left = locations->InAt(0).AsRegister<Register>();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        // Encode the constant directly in the CMP when possible; otherwise
        // materialize it in the scratch register IP first.
        if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
          __ cmp(left, operand);
        } else {
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(left, ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  // Branch to the false successor unless it is the next block in layout order.
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}


// A condition takes a register and a register-or-constant input; it only
// produces an output register when it must be materialized as 0/1.
void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

// Materializes a condition as 0/1: compare the inputs, then use an IT
// (if-then-else) block to conditionally move 1 on the condition and 0 on its
// opposite. Non-materialized conditions generate no code here — their user
// (e.g. VisitIf) emits the compare and branch itself.
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  if (!comp->NeedsMaterialization()) return;
  LocationSummary* locations = comp->GetLocations();
  Register left = locations->InAt(0).AsRegister<Register>();

  if (locations->InAt(1).IsRegister()) {
    __ cmp(left, ShifterOperand(locations->InAt(1).AsRegister<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    // Same immediate-encoding fallback as in VisitIf.
    if (GetAssembler()->ShifterOperandCanHold(R0, left, CMP, value, &operand)) {
      __ cmp(left, operand);
    } else {
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(left, ShifterOperand(temp));
    }
  }
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().AsRegister<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}

// The concrete comparison visitors below all delegate to the shared
// HCondition handling above.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

// Locals carry no code of their own.
void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  // Locals are only ever created in the entry block.
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(load);
}

// A store to a local pins its value input to the local's stack slot (single
// or double width depending on the type), so no move is needed at codegen.
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}

void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  UNUSED(store);
}

// Constants are not materialized into registers here: each use site reads the
// constant location directly.
void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
  UNUSED(constant);
}

void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}

// The returned value must already live in the return location dictated by the
// calling convention for its type.
void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  locations->SetInAt(0, parameter_visitor_.GetReturnLocation(ret->InputAt(0)->GetType()));
}

void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  UNUSED(ret);
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  HandleInvoke(invoke);
}

// Loads the ArtMethod* of the method being compiled, which is stored at a
// fixed offset (kCurrentMethodStackOffset) from SP.
void CodeGeneratorARM::LoadCurrentMethod(Register reg) {
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}

// Static/direct call: resolve the callee through the caller's dex cache and
// call its quick-compiled entry point.
void InstructionCodeGeneratorARM::VisitInvokeStaticOrDirect(HInvokeStaticOrDirect* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.
  // temp = method;
  codegen_->LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ LoadFromOffset(
      kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
  // temp = temp[index_in_cache]
  __ LoadFromOffset(
      kLoadWord, temp, temp, CodeGenerator::GetCacheOffset(invoke->GetIndexInDexCache()));
  // LR = temp[offset_of_quick_compiled_code]
  __ LoadFromOffset(kLoadWord, LR, temp,
                    mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
                        kArmWordSize).Int32Value());
  // LR()
  __ blx(LR);

  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

// Shared location setup for all invokes: R0 is reserved as a temp (it holds
// the resolved method during the call sequence), arguments are placed per the
// dex calling convention, and the output uses the convention's return
// location for the invoke's type.
void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
  locations->AddTemp(Location::RegisterLocation(R0));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  locations->SetOut(calling_convention_visitor.GetReturnLocation(invoke->GetType()));
}

void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}

// Virtual call: load the receiver's class, fetch the target method from the
// class's embedded vtable, then call its quick-compiled entry point.
void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedVTableOffset().Uint32Value() +
          invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // temp = temp->GetMethodAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

void LocationsBuilderARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  HandleInvoke(invoke);
  // Add the hidden argument.
  invoke->GetLocations()->AddTemp(Location::RegisterLocation(R12));
}

// Interface call: like a virtual call, but dispatched through the class's
// embedded IMT (indexed modulo kImtSize), with the callee's dex method index
// passed as a hidden argument in R12.
void InstructionCodeGeneratorARM::VisitInvokeInterface(HInvokeInterface* invoke) {
  // TODO: b/18116999, our IMTs can miss an IncompatibleClassChangeError.
  Register temp = invoke->GetLocations()->GetTemp(0).AsRegister<Register>();
  uint32_t method_offset = mirror::Class::EmbeddedImTableOffset().Uint32Value() +
      (invoke->GetImtIndex() % mirror::Class::kImtSize) * sizeof(mirror::Class::ImTableEntry);
  LocationSummary* locations = invoke->GetLocations();
  Location receiver = locations->InAt(0);
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  // Set the hidden argument.
  __ LoadImmediate(invoke->GetLocations()->GetTemp(1).AsRegister<Register>(),
                   invoke->GetDexMethodIndex());

  // temp = object->GetClass();
  if (receiver.IsStackSlot()) {
    __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex());
    __ LoadFromOffset(kLoadWord, temp, temp, class_offset);
  } else {
    __ LoadFromOffset(kLoadWord, temp, receiver.AsRegister<Register>(), class_offset);
  }
  // temp = temp->GetImtEntryAt(method_offset);
  uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset(
      kArmWordSize).Int32Value();
  __ LoadFromOffset(kLoadWord, temp, temp, method_offset);
  // LR = temp->GetEntryPoint();
  __ LoadFromOffset(kLoadWord, LR, temp, entry_point);
  // LR();
  __ blx(LR);
  DCHECK(!codegen_->IsLeafMethod());
  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
}

// Int/long negation runs on core registers; float/double on VFP registers.
// The long output is marked as overlapping its input because the generated
// code writes the low result word before reading the high input word.
void LocationsBuilderARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) {
  LocationSummary* locations = neg->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  switch (neg->GetResultType()) {
    case Primitive::kPrimInt:
      DCHECK(in.IsRegister());
      // out = 0 - in
      __ rsb(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(0));
      break;

    case Primitive::kPrimLong:
      DCHECK(in.IsRegisterPair());
      // out.lo = 0 - in.lo (and update the carry/borrow (C) flag)
      __ rsbs(out.AsRegisterPairLow<Register>(),
              in.AsRegisterPairLow<Register>(),
              ShifterOperand(0));
      // We cannot emit an RSC (Reverse Subtract with Carry)
      // instruction here, as it does not exist in the Thumb-2
      // instruction set. We use the following approach
      // using SBC and SUB instead.
      //
      // out.hi = -C
      __ sbc(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(out.AsRegisterPairHigh<Register>()));
      // out.hi = out.hi - in.hi
      __ sub(out.AsRegisterPairHigh<Register>(),
             out.AsRegisterPairHigh<Register>(),
             ShifterOperand(in.AsRegisterPairHigh<Register>()));
      break;

    case Primitive::kPrimFloat:
      DCHECK(in.IsFpuRegister());
      __ vnegs(out.AsFpuRegister<SRegister>(), in.AsFpuRegister<SRegister>());
      break;

    case Primitive::kPrimDouble:
      DCHECK(in.IsFpuRegisterPair());
      __ vnegd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
      break;

    default:
      LOG(FATAL) << "Unexpected neg type " << neg->GetResultType();
  }
}

// Sets up the locations for every supported primitive conversion. Only the
// float-to-long and double-to-long conversions call into the runtime; all
// other conversions are generated inline.
void LocationsBuilderARM::VisitTypeConversion(HTypeConversion* conversion) {
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);

  // The float-to-long and double-to-long type conversions rely on a
  // call to the runtime.
  LocationSummary::CallKind call_kind =
      ((input_type == Primitive::kPrimFloat || input_type == Primitive::kPrimDouble)
       && result_type == Primitive::kPrimLong)
      ? LocationSummary::kCall
      : LocationSummary::kNoCall;
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(conversion, call_kind);

  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          locations->SetInAt(0, Location::Any());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-int' instruction.
          // The FPU temp holds the intermediate integer-valued result.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-int' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-long' instruction.
          // Runtime call: argument and result follow the runtime calling
          // convention (result pair in R0/R1).
          InvokeRuntimeCallingConvention calling_convention;
          locations->SetInAt(0, Location::FpuRegisterLocation(
              calling_convention.GetFpuRegisterAt(0)));
          locations->SetOut(Location::RegisterPairLocation(R0, R1));
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-long' instruction.
          InvokeRuntimeCallingConvention calling_convention;
          locations->SetInAt(0, Location::FpuRegisterPairLocation(
              calling_convention.GetFpuRegisterAt(0),
              calling_convention.GetFpuRegisterAt(1)));
          locations->SetOut(Location::RegisterPairLocation(R0, R1));
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-float' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-float' instruction.
          // Two core temps hold the 2^32 double constant halves and two FPU
          // temps hold the double-precision intermediates (see codegen).
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimLong:
          // Processing a Dex `long-to-double' instruction.
          locations->SetInAt(0, Location::RequiresRegister());
          locations->SetOut(Location::RequiresFpuRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresRegister());
          locations->AddTemp(Location::RequiresFpuRegister());
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          locations->SetInAt(0, Location::RequiresFpuRegister());
          locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}

// Emits the code for each conversion configured above.
void InstructionCodeGeneratorARM::VisitTypeConversion(HTypeConversion* conversion) {
  LocationSummary* locations = conversion->GetLocations();
  Location out = locations->Out();
  Location in = locations->InAt(0);
  Primitive::Type result_type = conversion->GetResultType();
  Primitive::Type input_type = conversion->GetInputType();
  DCHECK_NE(result_type, input_type);
  switch (result_type) {
    case Primitive::kPrimByte:
      switch (input_type) {
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-byte' instruction.
          // Sign-extend the low 8 bits.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 8);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimShort:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-short' instruction.
          // Sign-extend the low 16 bits.
          __ sbfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimInt:
      switch (input_type) {
        case Primitive::kPrimLong:
          // Processing a Dex `long-to-int' instruction.
          // Truncation: keep only the low 32 bits, wherever the input lives.
          DCHECK(out.IsRegister());
          if (in.IsRegisterPair()) {
            __ Mov(out.AsRegister<Register>(), in.AsRegisterPairLow<Register>());
          } else if (in.IsDoubleStackSlot()) {
            __ LoadFromOffset(kLoadWord, out.AsRegister<Register>(), SP, in.GetStackIndex());
          } else {
            DCHECK(in.IsConstant());
            DCHECK(in.GetConstant()->IsLongConstant());
            int64_t value = in.GetConstant()->AsLongConstant()->GetValue();
            __ LoadImmediate(out.AsRegister<Register>(), static_cast<int32_t>(value));
          }
          break;

        case Primitive::kPrimFloat: {
          // Processing a Dex `float-to-int' instruction.
          SRegister temp = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          __ vmovs(temp, in.AsFpuRegister<SRegister>());
          __ vcvtis(temp, temp);
          __ vmovrs(out.AsRegister<Register>(), temp);
          break;
        }

        case Primitive::kPrimDouble: {
          // Processing a Dex `double-to-int' instruction.
          SRegister temp_s = locations->GetTemp(0).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);
          __ vmovd(temp_d, FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          __ vcvtid(temp_s, temp_d);
          __ vmovrs(out.AsRegister<Register>(), temp_s);
          break;
        }

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimLong:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar:
          // Processing a Dex `int-to-long' instruction.
          DCHECK(out.IsRegisterPair());
          DCHECK(in.IsRegister());
          __ Mov(out.AsRegisterPairLow<Register>(), in.AsRegister<Register>());
          // Sign extension.
          __ Asr(out.AsRegisterPairHigh<Register>(),
                 out.AsRegisterPairLow<Register>(),
                 31);
          break;

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-long' instruction.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pF2l),
                                  conversion,
                                  conversion->GetDexPc());
          break;

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-long' instruction.
          codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pD2l),
                                  conversion,
                                  conversion->GetDexPc());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimChar:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          // Processing a Dex `int-to-char' instruction.
          // Zero-extend the low 16 bits (char is unsigned).
          __ ubfx(out.AsRegister<Register>(), in.AsRegister<Register>(), 0, 16);
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      }
      break;

    case Primitive::kPrimFloat:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-float' instruction.
          __ vmovsr(out.AsFpuRegister<SRegister>(), in.AsRegister<Register>());
          __ vcvtsi(out.AsFpuRegister<SRegister>(), out.AsFpuRegister<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-float' instruction.
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister output = out.AsFpuRegister<SRegister>();
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp1_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp1_d = FromLowSToD(temp1_s);
          SRegister temp2_s = locations->GetTemp(3).AsFpuRegisterPairLow<SRegister>();
          DRegister temp2_d = FromLowSToD(temp2_s);

          // Operations use doubles for precision reasons (each 32-bit
          // half of a long fits in the 53-bit mantissa of a double,
          // but not in the 24-bit mantissa of a float). This is
          // especially important for the low bits. The result is
          // eventually converted to float.

          // temp1_d = int-to-double(high)
          __ vmovsr(temp1_s, high);
          __ vcvtdi(temp1_d, temp1_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp2_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand. Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into
          // `temp2_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp2_d, constant_low, constant_high);
          // temp1_d = temp1_d * 2^32
          __ vmuld(temp1_d, temp1_d, temp2_d);
          // temp2_d = unsigned-to-double(low)
          __ vmovsr(temp2_s, low);
          __ vcvtdu(temp2_d, temp2_s);
          // temp1_d = temp1_d + temp2_d
          __ vaddd(temp1_d, temp1_d, temp2_d);
          // output = double-to-float(temp1_d);
          __ vcvtsd(output, temp1_d);
          break;
        }

        case Primitive::kPrimDouble:
          // Processing a Dex `double-to-float' instruction.
          __ vcvtsd(out.AsFpuRegister<SRegister>(),
                    FromLowSToD(in.AsFpuRegisterPairLow<SRegister>()));
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    case Primitive::kPrimDouble:
      switch (input_type) {
        case Primitive::kPrimByte:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
        case Primitive::kPrimChar: {
          // Processing a Dex `int-to-double' instruction.
          __ vmovsr(out.AsFpuRegisterPairLow<SRegister>(), in.AsRegister<Register>());
          __ vcvtdi(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    out.AsFpuRegisterPairLow<SRegister>());
          break;
        }

        case Primitive::kPrimLong: {
          // Processing a Dex `long-to-double' instruction.
          // Same 2^32-scaling scheme as long-to-float above, except the
          // result stays in double precision (no final narrowing).
          Register low = in.AsRegisterPairLow<Register>();
          Register high = in.AsRegisterPairHigh<Register>();
          SRegister out_s = out.AsFpuRegisterPairLow<SRegister>();
          DRegister out_d = FromLowSToD(out_s);
          Register constant_low = locations->GetTemp(0).AsRegister<Register>();
          Register constant_high = locations->GetTemp(1).AsRegister<Register>();
          SRegister temp_s = locations->GetTemp(2).AsFpuRegisterPairLow<SRegister>();
          DRegister temp_d = FromLowSToD(temp_s);

          // out_d = int-to-double(high)
          __ vmovsr(out_s, high);
          __ vcvtdi(out_d, out_s);
          // Using vmovd to load the `k2Pow32EncodingForDouble` constant
          // as an immediate value into `temp_d` does not work, as
          // this instruction only transfers 8 significant bits of its
          // immediate operand. Instead, use two 32-bit core
          // registers to load `k2Pow32EncodingForDouble` into `temp_d`.
          __ LoadImmediate(constant_low, Low32Bits(k2Pow32EncodingForDouble));
          __ LoadImmediate(constant_high, High32Bits(k2Pow32EncodingForDouble));
          __ vmovdrr(temp_d, constant_low, constant_high);
          // out_d = out_d * 2^32
          __ vmuld(out_d, out_d, temp_d);
          // temp_d = unsigned-to-double(low)
          __ vmovsr(temp_s, low);
          __ vcvtdu(temp_d, temp_s);
          // out_d = out_d + temp_d
          __ vaddd(out_d, out_d, temp_d);
          break;
        }

        case Primitive::kPrimFloat:
          // Processing a Dex `float-to-double' instruction.
          __ vcvtds(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
                    in.AsFpuRegister<SRegister>());
          break;

        default:
          LOG(FATAL) << "Unexpected type conversion from " << input_type
                     << " to " << result_type;
      };
      break;

    default:
      LOG(FATAL) << "Unexpected type conversion from " << input_type
                 << " to " << result_type;
  }
}

// Int/long additions accept a register-or-constant right-hand side;
// float/double additions require FPU registers on both sides. The long
// output overlaps its inputs (low word is written before high is read).
void LocationsBuilderARM::VisitAdd(HAdd* add) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall);
  switch (add->GetResultType()) {
    case Primitive::kPrimInt:
    case Primitive::kPrimLong: {
      bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong);
      locations->SetInAt(0, Location::RequiresRegister());
      locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1)));
      locations->SetOut(Location::RequiresRegister(), output_overlaps);
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      locations->SetInAt(0, Location::RequiresFpuRegister());
      locations->SetInAt(1, Location::RequiresFpuRegister());
      locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap);
      break;
    }

    default:
      LOG(FATAL) << "Unexpected add type " << add->GetResultType();
  }
}

void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) {
  LocationSummary*
locations = add->GetLocations(); 1850 Location out = locations->Out(); 1851 Location first = locations->InAt(0); 1852 Location second = locations->InAt(1); 1853 switch (add->GetResultType()) { 1854 case Primitive::kPrimInt: 1855 if (second.IsRegister()) { 1856 __ add(out.AsRegister<Register>(), 1857 first.AsRegister<Register>(), 1858 ShifterOperand(second.AsRegister<Register>())); 1859 } else { 1860 __ AddConstant(out.AsRegister<Register>(), 1861 first.AsRegister<Register>(), 1862 second.GetConstant()->AsIntConstant()->GetValue()); 1863 } 1864 break; 1865 1866 case Primitive::kPrimLong: 1867 __ adds(out.AsRegisterPairLow<Register>(), 1868 first.AsRegisterPairLow<Register>(), 1869 ShifterOperand(second.AsRegisterPairLow<Register>())); 1870 __ adc(out.AsRegisterPairHigh<Register>(), 1871 first.AsRegisterPairHigh<Register>(), 1872 ShifterOperand(second.AsRegisterPairHigh<Register>())); 1873 break; 1874 1875 case Primitive::kPrimFloat: 1876 __ vadds(out.AsFpuRegister<SRegister>(), 1877 first.AsFpuRegister<SRegister>(), 1878 second.AsFpuRegister<SRegister>()); 1879 break; 1880 1881 case Primitive::kPrimDouble: 1882 __ vaddd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), 1883 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()), 1884 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>())); 1885 break; 1886 1887 default: 1888 LOG(FATAL) << "Unexpected add type " << add->GetResultType(); 1889 } 1890} 1891 1892void LocationsBuilderARM::VisitSub(HSub* sub) { 1893 LocationSummary* locations = 1894 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall); 1895 switch (sub->GetResultType()) { 1896 case Primitive::kPrimInt: 1897 case Primitive::kPrimLong: { 1898 bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong); 1899 locations->SetInAt(0, Location::RequiresRegister()); 1900 locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1))); 1901 locations->SetOut(Location::RequiresRegister(), output_overlaps); 1902 break; 1903 } 1904 
case Primitive::kPrimFloat: 1905 case Primitive::kPrimDouble: { 1906 locations->SetInAt(0, Location::RequiresFpuRegister()); 1907 locations->SetInAt(1, Location::RequiresFpuRegister()); 1908 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); 1909 break; 1910 } 1911 default: 1912 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType(); 1913 } 1914} 1915 1916void InstructionCodeGeneratorARM::VisitSub(HSub* sub) { 1917 LocationSummary* locations = sub->GetLocations(); 1918 Location out = locations->Out(); 1919 Location first = locations->InAt(0); 1920 Location second = locations->InAt(1); 1921 switch (sub->GetResultType()) { 1922 case Primitive::kPrimInt: { 1923 if (second.IsRegister()) { 1924 __ sub(out.AsRegister<Register>(), 1925 first.AsRegister<Register>(), 1926 ShifterOperand(second.AsRegister<Register>())); 1927 } else { 1928 __ AddConstant(out.AsRegister<Register>(), 1929 first.AsRegister<Register>(), 1930 -second.GetConstant()->AsIntConstant()->GetValue()); 1931 } 1932 break; 1933 } 1934 1935 case Primitive::kPrimLong: { 1936 __ subs(out.AsRegisterPairLow<Register>(), 1937 first.AsRegisterPairLow<Register>(), 1938 ShifterOperand(second.AsRegisterPairLow<Register>())); 1939 __ sbc(out.AsRegisterPairHigh<Register>(), 1940 first.AsRegisterPairHigh<Register>(), 1941 ShifterOperand(second.AsRegisterPairHigh<Register>())); 1942 break; 1943 } 1944 1945 case Primitive::kPrimFloat: { 1946 __ vsubs(out.AsFpuRegister<SRegister>(), 1947 first.AsFpuRegister<SRegister>(), 1948 second.AsFpuRegister<SRegister>()); 1949 break; 1950 } 1951 1952 case Primitive::kPrimDouble: { 1953 __ vsubd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), 1954 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()), 1955 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>())); 1956 break; 1957 } 1958 1959 1960 default: 1961 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType(); 1962 } 1963} 1964 1965void LocationsBuilderARM::VisitMul(HMul* mul) { 1966 
// Emits code for an HMul. The long case performs a 64x64->64 multiply with
// three 32-bit multiplies; its instruction order and register constraints are
// load-bearing (see the DCHECKs below), so the code is kept as-is.
void InstructionCodeGeneratorARM::VisitMul(HMul* mul) {
  LocationSummary* locations = mul->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);
  switch (mul->GetResultType()) {
    case Primitive::kPrimInt: {
      __ mul(out.AsRegister<Register>(),
             first.AsRegister<Register>(),
             second.AsRegister<Register>());
      break;
    }
    case Primitive::kPrimLong: {
      Register out_hi = out.AsRegisterPairHigh<Register>();
      Register out_lo = out.AsRegisterPairLow<Register>();
      Register in1_hi = first.AsRegisterPairHigh<Register>();
      Register in1_lo = first.AsRegisterPairLow<Register>();
      Register in2_hi = second.AsRegisterPairHigh<Register>();
      Register in2_lo = second.AsRegisterPairLow<Register>();

      // Sanity checks made necessary by overlapping register pairs such as
      // R1_R2: the algorithm below writes out.hi (via mla) before it has
      // consumed in1.lo and in2.lo, so it is wrong if out.hi aliases either
      // of them (e.g. in1=r0_r1, in2=r2_r3 and out=r1_r2).
      DCHECK_NE(out_hi, in1_lo);
      DCHECK_NE(out_hi, in2_lo);

      // input: in1 - 64 bits, in2 - 64 bits
      // output: out
      // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo
      // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32]
      // parts: out.lo = (in1.lo * in2.lo)[31:0]

      // IP <- in1.lo * in2.hi
      __ mul(IP, in1_lo, in2_hi);
      // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo
      __ mla(out_hi, in1_hi, in2_lo, IP);
      // out.lo <- (in1.lo * in2.lo)[31:0];  IP gets the high 32 bits.
      __ umull(out_lo, IP, in1_lo, in2_lo);
      // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32]
      __ add(out_hi, out_hi, ShifterOperand(IP));
      break;
    }

    case Primitive::kPrimFloat: {
      __ vmuls(out.AsFpuRegister<SRegister>(),
               first.AsFpuRegister<SRegister>(),
               second.AsFpuRegister<SRegister>());
      break;
    }

    case Primitive::kPrimDouble: {
      __ vmuld(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()),
               FromLowSToD(second.AsFpuRegisterPairLow<SRegister>()));
      break;
    }

    default:
      LOG(FATAL) << "Unexpected mul type " << mul->GetResultType();
  }
}
LocationSummary::kCall 2055 : LocationSummary::kNoCall; 2056 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(div, call_kind); 2057 2058 switch (div->GetResultType()) { 2059 case Primitive::kPrimInt: { 2060 locations->SetInAt(0, Location::RequiresRegister()); 2061 locations->SetInAt(1, Location::RequiresRegister()); 2062 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 2063 break; 2064 } 2065 case Primitive::kPrimLong: { 2066 InvokeRuntimeCallingConvention calling_convention; 2067 locations->SetInAt(0, Location::RegisterPairLocation( 2068 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1))); 2069 locations->SetInAt(1, Location::RegisterPairLocation( 2070 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3))); 2071 // The runtime helper puts the output in R0,R2. 2072 locations->SetOut(Location::RegisterPairLocation(R0, R2)); 2073 break; 2074 } 2075 case Primitive::kPrimFloat: 2076 case Primitive::kPrimDouble: { 2077 locations->SetInAt(0, Location::RequiresFpuRegister()); 2078 locations->SetInAt(1, Location::RequiresFpuRegister()); 2079 locations->SetOut(Location::RequiresFpuRegister(), Location::kNoOutputOverlap); 2080 break; 2081 } 2082 2083 default: 2084 LOG(FATAL) << "Unexpected div type " << div->GetResultType(); 2085 } 2086} 2087 2088void InstructionCodeGeneratorARM::VisitDiv(HDiv* div) { 2089 LocationSummary* locations = div->GetLocations(); 2090 Location out = locations->Out(); 2091 Location first = locations->InAt(0); 2092 Location second = locations->InAt(1); 2093 2094 switch (div->GetResultType()) { 2095 case Primitive::kPrimInt: { 2096 __ sdiv(out.AsRegister<Register>(), 2097 first.AsRegister<Register>(), 2098 second.AsRegister<Register>()); 2099 break; 2100 } 2101 2102 case Primitive::kPrimLong: { 2103 InvokeRuntimeCallingConvention calling_convention; 2104 DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>()); 2105 
DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>()); 2106 DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegisterPairLow<Register>()); 2107 DCHECK_EQ(calling_convention.GetRegisterAt(3), second.AsRegisterPairHigh<Register>()); 2108 DCHECK_EQ(R0, out.AsRegisterPairLow<Register>()); 2109 DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>()); 2110 2111 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLdiv), div, div->GetDexPc()); 2112 break; 2113 } 2114 2115 case Primitive::kPrimFloat: { 2116 __ vdivs(out.AsFpuRegister<SRegister>(), 2117 first.AsFpuRegister<SRegister>(), 2118 second.AsFpuRegister<SRegister>()); 2119 break; 2120 } 2121 2122 case Primitive::kPrimDouble: { 2123 __ vdivd(FromLowSToD(out.AsFpuRegisterPairLow<SRegister>()), 2124 FromLowSToD(first.AsFpuRegisterPairLow<SRegister>()), 2125 FromLowSToD(second.AsFpuRegisterPairLow<SRegister>())); 2126 break; 2127 } 2128 2129 default: 2130 LOG(FATAL) << "Unexpected div type " << div->GetResultType(); 2131 } 2132} 2133 2134void LocationsBuilderARM::VisitRem(HRem* rem) { 2135 Primitive::Type type = rem->GetResultType(); 2136 LocationSummary::CallKind call_kind = type == Primitive::kPrimInt 2137 ? 
LocationSummary::kNoCall 2138 : LocationSummary::kCall; 2139 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(rem, call_kind); 2140 2141 switch (type) { 2142 case Primitive::kPrimInt: { 2143 locations->SetInAt(0, Location::RequiresRegister()); 2144 locations->SetInAt(1, Location::RequiresRegister()); 2145 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 2146 locations->AddTemp(Location::RequiresRegister()); 2147 break; 2148 } 2149 case Primitive::kPrimLong: { 2150 InvokeRuntimeCallingConvention calling_convention; 2151 locations->SetInAt(0, Location::RegisterPairLocation( 2152 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1))); 2153 locations->SetInAt(1, Location::RegisterPairLocation( 2154 calling_convention.GetRegisterAt(2), calling_convention.GetRegisterAt(3))); 2155 // The runtime helper puts the output in R2,R3. 2156 locations->SetOut(Location::RegisterPairLocation(R2, R3)); 2157 break; 2158 } 2159 case Primitive::kPrimFloat: { 2160 InvokeRuntimeCallingConvention calling_convention; 2161 locations->SetInAt(0, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(0))); 2162 locations->SetInAt(1, Location::FpuRegisterLocation(calling_convention.GetFpuRegisterAt(1))); 2163 locations->SetOut(Location::FpuRegisterLocation(S0)); 2164 break; 2165 } 2166 2167 case Primitive::kPrimDouble: { 2168 InvokeRuntimeCallingConvention calling_convention; 2169 locations->SetInAt(0, Location::FpuRegisterPairLocation( 2170 calling_convention.GetFpuRegisterAt(0), calling_convention.GetFpuRegisterAt(1))); 2171 locations->SetInAt(1, Location::FpuRegisterPairLocation( 2172 calling_convention.GetFpuRegisterAt(2), calling_convention.GetFpuRegisterAt(3))); 2173 locations->SetOut(Location::Location::FpuRegisterPairLocation(S0, S1)); 2174 break; 2175 } 2176 2177 default: 2178 LOG(FATAL) << "Unexpected rem type " << type; 2179 } 2180} 2181 2182void InstructionCodeGeneratorARM::VisitRem(HRem* rem) { 2183 
LocationSummary* locations = rem->GetLocations(); 2184 Location out = locations->Out(); 2185 Location first = locations->InAt(0); 2186 Location second = locations->InAt(1); 2187 2188 Primitive::Type type = rem->GetResultType(); 2189 switch (type) { 2190 case Primitive::kPrimInt: { 2191 Register reg1 = first.AsRegister<Register>(); 2192 Register reg2 = second.AsRegister<Register>(); 2193 Register temp = locations->GetTemp(0).AsRegister<Register>(); 2194 2195 // temp = reg1 / reg2 (integer division) 2196 // temp = temp * reg2 2197 // dest = reg1 - temp 2198 __ sdiv(temp, reg1, reg2); 2199 __ mul(temp, temp, reg2); 2200 __ sub(out.AsRegister<Register>(), reg1, ShifterOperand(temp)); 2201 break; 2202 } 2203 2204 case Primitive::kPrimLong: { 2205 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pLmod), rem, rem->GetDexPc()); 2206 break; 2207 } 2208 2209 case Primitive::kPrimFloat: { 2210 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmodf), rem, rem->GetDexPc()); 2211 break; 2212 } 2213 2214 case Primitive::kPrimDouble: { 2215 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pFmod), rem, rem->GetDexPc()); 2216 break; 2217 } 2218 2219 default: 2220 LOG(FATAL) << "Unexpected rem type " << type; 2221 } 2222} 2223 2224void LocationsBuilderARM::VisitDivZeroCheck(HDivZeroCheck* instruction) { 2225 LocationSummary* locations = 2226 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 2227 locations->SetInAt(0, Location::RegisterOrConstant(instruction->InputAt(0))); 2228 if (instruction->HasUses()) { 2229 locations->SetOut(Location::SameAsFirstInput()); 2230 } 2231} 2232 2233void InstructionCodeGeneratorARM::VisitDivZeroCheck(HDivZeroCheck* instruction) { 2234 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) DivZeroCheckSlowPathARM(instruction); 2235 codegen_->AddSlowPath(slow_path); 2236 2237 LocationSummary* locations = instruction->GetLocations(); 2238 Location value = locations->InAt(0); 2239 2240 switch (instruction->GetType()) { 2241 case 
Primitive::kPrimInt: { 2242 if (value.IsRegister()) { 2243 __ cmp(value.AsRegister<Register>(), ShifterOperand(0)); 2244 __ b(slow_path->GetEntryLabel(), EQ); 2245 } else { 2246 DCHECK(value.IsConstant()) << value; 2247 if (value.GetConstant()->AsIntConstant()->GetValue() == 0) { 2248 __ b(slow_path->GetEntryLabel()); 2249 } 2250 } 2251 break; 2252 } 2253 case Primitive::kPrimLong: { 2254 if (value.IsRegisterPair()) { 2255 __ orrs(IP, 2256 value.AsRegisterPairLow<Register>(), 2257 ShifterOperand(value.AsRegisterPairHigh<Register>())); 2258 __ b(slow_path->GetEntryLabel(), EQ); 2259 } else { 2260 DCHECK(value.IsConstant()) << value; 2261 if (value.GetConstant()->AsLongConstant()->GetValue() == 0) { 2262 __ b(slow_path->GetEntryLabel()); 2263 } 2264 } 2265 break; 2266 default: 2267 LOG(FATAL) << "Unexpected type for HDivZeroCheck " << instruction->GetType(); 2268 } 2269 } 2270} 2271 2272void LocationsBuilderARM::HandleShift(HBinaryOperation* op) { 2273 DCHECK(op->IsShl() || op->IsShr() || op->IsUShr()); 2274 2275 LocationSummary::CallKind call_kind = op->GetResultType() == Primitive::kPrimLong 2276 ? LocationSummary::kCall 2277 : LocationSummary::kNoCall; 2278 LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(op, call_kind); 2279 2280 switch (op->GetResultType()) { 2281 case Primitive::kPrimInt: { 2282 locations->SetInAt(0, Location::RequiresRegister()); 2283 locations->SetInAt(1, Location::RegisterOrConstant(op->InputAt(1))); 2284 locations->SetOut(Location::RequiresRegister()); 2285 break; 2286 } 2287 case Primitive::kPrimLong: { 2288 InvokeRuntimeCallingConvention calling_convention; 2289 locations->SetInAt(0, Location::RegisterPairLocation( 2290 calling_convention.GetRegisterAt(0), calling_convention.GetRegisterAt(1))); 2291 locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); 2292 // The runtime helper puts the output in R0,R2. 
// Emits code for HShl/HShr/HUShr. Int shifts are inlined (with explicit
// masking of the shift count); long shifts call the quick runtime helpers,
// whose argument/result registers are pinned by the builder and re-checked
// here with DCHECKs.
void InstructionCodeGeneratorARM::HandleShift(HBinaryOperation* op) {
  DCHECK(op->IsShl() || op->IsShr() || op->IsUShr());

  LocationSummary* locations = op->GetLocations();
  Location out = locations->Out();
  Location first = locations->InAt(0);
  Location second = locations->InAt(1);

  Primitive::Type type = op->GetResultType();
  switch (type) {
    case Primitive::kPrimInt: {
      Register out_reg = out.AsRegister<Register>();
      Register first_reg = first.AsRegister<Register>();
      // Arm doesn't mask the shift count so we need to do it ourselves
      // (Java semantics: only the low 5 bits of an int shift count are used).
      if (second.IsRegister()) {
        Register second_reg = second.AsRegister<Register>();
        // NOTE: this clobbers second_reg in place.
        __ and_(second_reg, second_reg, ShifterOperand(kMaxIntShiftValue));
        if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, second_reg);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, second_reg);
        } else {
          __ Lsr(out_reg, first_reg, second_reg);
        }
      } else {
        int32_t cst = second.GetConstant()->AsIntConstant()->GetValue();
        uint32_t shift_value = static_cast<uint32_t>(cst & kMaxIntShiftValue);
        if (shift_value == 0) {  // arm does not support shifting with 0 immediate.
          __ Mov(out_reg, first_reg);
        } else if (op->IsShl()) {
          __ Lsl(out_reg, first_reg, shift_value);
        } else if (op->IsShr()) {
          __ Asr(out_reg, first_reg, shift_value);
        } else {
          __ Lsr(out_reg, first_reg, shift_value);
        }
      }
      break;
    }
    case Primitive::kPrimLong: {
      // TODO: Inline the assembly instead of calling the runtime.
      // Verify the builder placed inputs/outputs where the helper expects them.
      InvokeRuntimeCallingConvention calling_convention;
      DCHECK_EQ(calling_convention.GetRegisterAt(0), first.AsRegisterPairLow<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(1), first.AsRegisterPairHigh<Register>());
      DCHECK_EQ(calling_convention.GetRegisterAt(2), second.AsRegister<Register>());
      DCHECK_EQ(R0, out.AsRegisterPairLow<Register>());
      DCHECK_EQ(R2, out.AsRegisterPairHigh<Register>());

      int32_t entry_point_offset;
      if (op->IsShl()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShlLong);
      } else if (op->IsShr()) {
        entry_point_offset = QUICK_ENTRY_POINT(pShrLong);
      } else {
        entry_point_offset = QUICK_ENTRY_POINT(pUshrLong);
      }
      // Load the entry point through the thread register and call it directly
      // (no slow-path record; presumably these helpers cannot deoptimize —
      // NOTE(review): confirm no stack map is needed here).
      __ LoadFromOffset(kLoadWord, LR, TR, entry_point_offset);
      __ blx(LR);
      break;
    }
    default:
      LOG(FATAL) << "Unexpected operation type " << type;
  }
}
InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) { 2400 InvokeRuntimeCallingConvention calling_convention; 2401 codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1)); 2402 __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex()); 2403 codegen_->InvokeRuntime( 2404 QUICK_ENTRY_POINT(pAllocObjectWithAccessCheck), instruction, instruction->GetDexPc()); 2405} 2406 2407void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) { 2408 LocationSummary* locations = 2409 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall); 2410 InvokeRuntimeCallingConvention calling_convention; 2411 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0))); 2412 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 2413 locations->SetOut(Location::RegisterLocation(R0)); 2414 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); 2415} 2416 2417void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) { 2418 InvokeRuntimeCallingConvention calling_convention; 2419 codegen_->LoadCurrentMethod(calling_convention.GetRegisterAt(1)); 2420 __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex()); 2421 codegen_->InvokeRuntime( 2422 QUICK_ENTRY_POINT(pAllocArrayWithAccessCheck), instruction, instruction->GetDexPc()); 2423} 2424 2425void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) { 2426 LocationSummary* locations = 2427 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 2428 Location location = parameter_visitor_.GetNextLocation(instruction->GetType()); 2429 if (location.IsStackSlot()) { 2430 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); 2431 } else if (location.IsDoubleStackSlot()) { 2432 location = Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); 
2433 } 2434 locations->SetOut(location); 2435} 2436 2437void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) { 2438 // Nothing to do, the parameter is already at its location. 2439 UNUSED(instruction); 2440} 2441 2442void LocationsBuilderARM::VisitNot(HNot* not_) { 2443 LocationSummary* locations = 2444 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall); 2445 locations->SetInAt(0, Location::RequiresRegister()); 2446 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 2447} 2448 2449void InstructionCodeGeneratorARM::VisitNot(HNot* not_) { 2450 LocationSummary* locations = not_->GetLocations(); 2451 Location out = locations->Out(); 2452 Location in = locations->InAt(0); 2453 switch (not_->InputAt(0)->GetType()) { 2454 case Primitive::kPrimBoolean: 2455 __ eor(out.AsRegister<Register>(), in.AsRegister<Register>(), ShifterOperand(1)); 2456 break; 2457 2458 case Primitive::kPrimInt: 2459 __ mvn(out.AsRegister<Register>(), ShifterOperand(in.AsRegister<Register>())); 2460 break; 2461 2462 case Primitive::kPrimLong: 2463 __ mvn(out.AsRegisterPairLow<Register>(), 2464 ShifterOperand(in.AsRegisterPairLow<Register>())); 2465 __ mvn(out.AsRegisterPairHigh<Register>(), 2466 ShifterOperand(in.AsRegisterPairHigh<Register>())); 2467 break; 2468 2469 default: 2470 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType(); 2471 } 2472} 2473 2474void LocationsBuilderARM::VisitCompare(HCompare* compare) { 2475 LocationSummary* locations = 2476 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall); 2477 switch (compare->InputAt(0)->GetType()) { 2478 case Primitive::kPrimLong: { 2479 locations->SetInAt(0, Location::RequiresRegister()); 2480 locations->SetInAt(1, Location::RequiresRegister()); 2481 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 2482 break; 2483 } 2484 case Primitive::kPrimFloat: 2485 case Primitive::kPrimDouble: { 
// Emits code for HCompare: materializes -1/0/+1 into `out` depending on
// whether left <, ==, or > right. The switch only performs the comparison;
// the shared tail below it converts the resulting APSR flags into a value.
// The code is flag-sensitive (see the LoadImmediate ordering comment), so it
// is kept byte-for-byte.
void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) {
  LocationSummary* locations = compare->GetLocations();
  Register out = locations->Out().AsRegister<Register>();
  Location left = locations->InAt(0);
  Location right = locations->InAt(1);

  Label less, greater, done;
  Primitive::Type type = compare->InputAt(0)->GetType();
  switch (type) {
    case Primitive::kPrimLong: {
      // Compare high words first; only if they are equal does control fall
      // through to the unsigned low-word compare below.
      __ cmp(left.AsRegisterPairHigh<Register>(),
             ShifterOperand(right.AsRegisterPairHigh<Register>()));  // Signed compare.
      __ b(&less, LT);
      __ b(&greater, GT);
      // Do LoadImmediate before any `cmp`, as LoadImmediate might affect the status flags.
      __ LoadImmediate(out, 0);
      __ cmp(left.AsRegisterPairLow<Register>(),
             ShifterOperand(right.AsRegisterPairLow<Register>()));  // Unsigned compare.
      break;
    }
    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      __ LoadImmediate(out, 0);
      if (type == Primitive::kPrimFloat) {
        __ vcmps(left.AsFpuRegister<SRegister>(), right.AsFpuRegister<SRegister>());
      } else {
        __ vcmpd(FromLowSToD(left.AsFpuRegisterPairLow<SRegister>()),
                 FromLowSToD(right.AsFpuRegisterPairLow<SRegister>()));
      }
      __ vmstat();  // transfer FP status register to ARM APSR.
      // NaN handling: unordered (VS) goes to +1 or -1 per the gt-bias flag.
      __ b(compare->IsGtBias() ? &greater : &less, VS);  // VS for unordered.
      break;
    }
    default:
      LOG(FATAL) << "Unexpected compare type " << type;
  }
  // Shared tail: `out` is already 0; overwrite with +1/-1 if not equal.
  __ b(&done, EQ);
  __ b(&less, CC);  // CC is for both: unsigned compare for longs and 'less than' for floats.

  __ Bind(&greater);
  __ LoadImmediate(out, 1);
  __ b(&done);

  __ Bind(&less);
  __ LoadImmediate(out, -1);

  __ Bind(&done);
}
// Emits the store for an instance field write, dispatching on the field type
// to pick the store width, and emits a GC card mark after reference stores.
void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      Register value = locations->InAt(1).AsRegister<Register>();
      __ StoreToOffset(kStoreByte, value, obj, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      Register value = locations->InAt(1).AsRegister<Register>();
      __ StoreToOffset(kStoreHalfword, value, obj, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).AsRegister<Register>();
      __ StoreToOffset(kStoreWord, value, obj, offset);
      // Reference stores into the heap must dirty the GC card for `obj` so
      // the collector rescans it; the two temps were reserved by the builder.
      if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->GetValue())) {
        Register temp = locations->GetTemp(0).AsRegister<Register>();
        Register card = locations->GetTemp(1).AsRegister<Register>();
        codegen_->MarkGCCard(temp, card, obj, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      // Passes the low register of the pair; kStoreWordPair presumably stores
      // both halves (STRD-style) — the assembler owns that detail.
      Location value = locations->InAt(1);
      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      break;
    }

    case Primitive::kPrimFloat: {
      SRegister value = locations->InAt(1).AsFpuRegister<SRegister>();
      __ StoreSToOffset(value, obj, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
      __ StoreDToOffset(value, obj, offset);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}
2676 Location out = locations->Out(); 2677 __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset); 2678 break; 2679 } 2680 2681 case Primitive::kPrimFloat: { 2682 SRegister out = locations->Out().AsFpuRegister<SRegister>(); 2683 __ LoadSFromOffset(out, obj, offset); 2684 break; 2685 } 2686 2687 case Primitive::kPrimDouble: { 2688 DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>()); 2689 __ LoadDFromOffset(out, obj, offset); 2690 break; 2691 } 2692 2693 case Primitive::kPrimVoid: 2694 LOG(FATAL) << "Unreachable type " << instruction->GetType(); 2695 UNREACHABLE(); 2696 } 2697} 2698 2699void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) { 2700 LocationSummary* locations = 2701 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 2702 locations->SetInAt(0, Location::RequiresRegister()); 2703 if (instruction->HasUses()) { 2704 locations->SetOut(Location::SameAsFirstInput()); 2705 } 2706} 2707 2708void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) { 2709 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction); 2710 codegen_->AddSlowPath(slow_path); 2711 2712 LocationSummary* locations = instruction->GetLocations(); 2713 Location obj = locations->InAt(0); 2714 2715 if (obj.IsRegister()) { 2716 __ cmp(obj.AsRegister<Register>(), ShifterOperand(0)); 2717 __ b(slow_path->GetEntryLabel(), EQ); 2718 } else { 2719 DCHECK(obj.IsConstant()) << obj; 2720 DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0); 2721 __ b(slow_path->GetEntryLabel()); 2722 } 2723} 2724 2725void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) { 2726 LocationSummary* locations = 2727 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 2728 locations->SetInAt(0, Location::RequiresRegister()); 2729 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1))); 2730 
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// Generates an array element load. A constant index is folded into the
// immediate offset; otherwise the element address is formed in the IP scratch
// register with a (scaled) add. Float/double element loads are not implemented
// at this point.
void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Location index = locations->InAt(1);

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ LoadFromOffset(kLoadSignedByte, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>()));
        __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2));
        __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      // References are compressed-size heap references; assert they are 32-bit.
      DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t));
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register out = locations->Out().AsRegister<Register>();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ LoadFromOffset(kLoadWord, out, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4));
        __ LoadFromOffset(kLoadWord, out, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location out = locations->Out();
      if (index.IsConstant()) {
        size_t offset =
            (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8));
        __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}

// Array stores that need a type check go through the runtime (fixed calling
// convention registers); all others take plain registers, plus two temps for
// the write barrier when storing references.
void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) {
  Primitive::Type value_type = instruction->GetComponentType();

  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue());
  bool needs_runtime_call = instruction->NeedsTypeCheck();

  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, needs_runtime_call ? LocationSummary::kCall : LocationSummary::kNoCall);
  if (needs_runtime_call) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    locations->SetInAt(2, Location::RequiresRegister());

    if (needs_write_barrier) {
      // Temporary registers for the write barrier.
2856 locations->AddTemp(Location::RequiresRegister()); 2857 locations->AddTemp(Location::RequiresRegister()); 2858 } 2859 } 2860} 2861 2862void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) { 2863 LocationSummary* locations = instruction->GetLocations(); 2864 Register obj = locations->InAt(0).AsRegister<Register>(); 2865 Location index = locations->InAt(1); 2866 Primitive::Type value_type = instruction->GetComponentType(); 2867 bool needs_runtime_call = locations->WillCall(); 2868 bool needs_write_barrier = 2869 CodeGenerator::StoreNeedsWriteBarrier(value_type, instruction->GetValue()); 2870 2871 switch (value_type) { 2872 case Primitive::kPrimBoolean: 2873 case Primitive::kPrimByte: { 2874 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value(); 2875 Register value = locations->InAt(2).AsRegister<Register>(); 2876 if (index.IsConstant()) { 2877 size_t offset = 2878 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset; 2879 __ StoreToOffset(kStoreByte, value, obj, offset); 2880 } else { 2881 __ add(IP, obj, ShifterOperand(index.AsRegister<Register>())); 2882 __ StoreToOffset(kStoreByte, value, IP, data_offset); 2883 } 2884 break; 2885 } 2886 2887 case Primitive::kPrimShort: 2888 case Primitive::kPrimChar: { 2889 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value(); 2890 Register value = locations->InAt(2).AsRegister<Register>(); 2891 if (index.IsConstant()) { 2892 size_t offset = 2893 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset; 2894 __ StoreToOffset(kStoreHalfword, value, obj, offset); 2895 } else { 2896 __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_2)); 2897 __ StoreToOffset(kStoreHalfword, value, IP, data_offset); 2898 } 2899 break; 2900 } 2901 2902 case Primitive::kPrimInt: 2903 case Primitive::kPrimNot: { 2904 if (!needs_runtime_call) { 2905 uint32_t data_offset = 
mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value(); 2906 Register value = locations->InAt(2).AsRegister<Register>(); 2907 if (index.IsConstant()) { 2908 size_t offset = 2909 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset; 2910 __ StoreToOffset(kStoreWord, value, obj, offset); 2911 } else { 2912 DCHECK(index.IsRegister()) << index; 2913 __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_4)); 2914 __ StoreToOffset(kStoreWord, value, IP, data_offset); 2915 } 2916 if (needs_write_barrier) { 2917 DCHECK_EQ(value_type, Primitive::kPrimNot); 2918 Register temp = locations->GetTemp(0).AsRegister<Register>(); 2919 Register card = locations->GetTemp(1).AsRegister<Register>(); 2920 codegen_->MarkGCCard(temp, card, obj, value); 2921 } 2922 } else { 2923 DCHECK_EQ(value_type, Primitive::kPrimNot); 2924 codegen_->InvokeRuntime(QUICK_ENTRY_POINT(pAputObject), 2925 instruction, 2926 instruction->GetDexPc()); 2927 } 2928 break; 2929 } 2930 2931 case Primitive::kPrimLong: { 2932 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value(); 2933 Location value = locations->InAt(2); 2934 if (index.IsConstant()) { 2935 size_t offset = 2936 (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset; 2937 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset); 2938 } else { 2939 __ add(IP, obj, ShifterOperand(index.AsRegister<Register>(), LSL, TIMES_8)); 2940 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset); 2941 } 2942 break; 2943 } 2944 2945 case Primitive::kPrimFloat: 2946 case Primitive::kPrimDouble: 2947 LOG(FATAL) << "Unimplemented register type " << instruction->GetType(); 2948 UNREACHABLE(); 2949 case Primitive::kPrimVoid: 2950 LOG(FATAL) << "Unreachable type " << instruction->GetType(); 2951 UNREACHABLE(); 2952 } 2953} 2954 2955void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) { 2956 LocationSummary* 
      locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// Loads the array length word from the array header.
void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
}

void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  if (instruction->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

// A single unsigned compare covers both failure modes: a negative index is a
// large unsigned value, so CS (unsigned >=) also fires for index < 0.
void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  Register index = locations->InAt(0).AsRegister<Register>();
  Register length = locations->InAt(1).AsRegister<Register>();

  __ cmp(index, ShifterOperand(length));
  __ b(slow_path->GetEntryLabel(), CS);
}

// Dirties the card-table entry covering `object` after a reference store.
// Skipped entirely when `value` is null. `card` is loaded with the thread's
// card table base; the byte stored at card_base[object >> kCardShift] is the
// low byte of `card` itself (the usual card-marking trick).
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}

void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
  UNUSED(temp);
}

void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  UNUSED(instruction);
  // Parallel moves are materialized by the register allocator, never built.
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

// Suspend checks are skipped where another construct already emits them: loop
// headers defer to the back edge, and the entry block's check folds into the
// following goto.
void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

// Tests the thread's flags halfword; any set flag diverts to the suspend
// slow path. With a `successor`, control continues there when no suspension
// is requested; otherwise execution resumes after the check.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ LoadFromOffset(
      kLoadUnsignedHalfword, IP, TR, Thread::ThreadFlagsOffset<kArmWordSize>().Int32Value());
  __ cmp(IP, ShifterOperand(0));
  // TODO: Figure out the branch offsets and use cbz/cbnz.
  if (successor == nullptr) {
    // Slow path returns to the bound label right after the check.
    __ b(slow_path->GetEntryLabel(), NE);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), EQ);
    __ b(slow_path->GetEntryLabel());
  }
}

ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}

// Emits one pending move of the parallel-move graph. Handles core registers,
// 32-bit stack slots and int constants (DCHECKed below); IP is the scratch.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(destination.AsRegister<Register>(), source.AsRegister<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.AsRegister<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.AsRegister<Register>(),
                        SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      // Stack-to-stack goes through IP.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else {
    DCHECK(source.IsConstant());
    DCHECK(source.GetConstant()->IsIntConstant());
    int32_t value = source.GetConstant()->AsIntConstant()->GetValue();
    if (destination.IsRegister()) {
      __ LoadImmediate(destination.AsRegister<Register>(), value);
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadImmediate(IP, value);
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}

// Swaps a core register with a stack slot, using IP to hold the register value.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}

// Swaps two stack slots. Needs a second scratch register besides IP; if the
// scope has to spill one, every SP-relative offset shifts by one word.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}

// Emits one swap of the parallel-move graph (register/register via IP, or the
// register/stack and stack/stack Exchange helpers above).
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    DCHECK_NE(source.AsRegister<Register>(), IP);
    DCHECK_NE(destination.AsRegister<Register>(), IP);
    __ Mov(IP, source.AsRegister<Register>());
    __ Mov(source.AsRegister<Register>(), destination.AsRegister<Register>());
    __ Mov(destination.AsRegister<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.AsRegister<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.AsRegister<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}

void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}

void LocationsBuilderARM::VisitLoadClass(HLoadClass* cls) {
  LocationSummary::CallKind call_kind = cls->CanCallRuntime()
      ?
 LocationSummary::kCallOnSlowPath
      : LocationSummary::kNoCall;
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(cls, call_kind);
  locations->SetOut(Location::RequiresRegister());
}

// Loads a class object. The referrer's own class comes straight off the
// current ArtMethod; any other class is read from the dex cache and may take
// the slow path for resolution and/or an initialization check.
void InstructionCodeGeneratorARM::VisitLoadClass(HLoadClass* cls) {
  Register out = cls->GetLocations()->Out().AsRegister<Register>();
  if (cls->IsReferrersClass()) {
    DCHECK(!cls->CanCallRuntime());
    DCHECK(!cls->MustGenerateClinitCheck());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  } else {
    DCHECK(cls->CanCallRuntime());
    codegen_->LoadCurrentMethod(out);
    __ LoadFromOffset(
        kLoadWord, out, out, mirror::ArtMethod::DexCacheResolvedTypesOffset().Int32Value());
    __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(cls->GetTypeIndex()));

    SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
        cls, cls, cls->GetDexPc(), cls->MustGenerateClinitCheck());
    codegen_->AddSlowPath(slow_path);
    // A null dex-cache entry means the class is unresolved: go to the runtime.
    __ cmp(out, ShifterOperand(0));
    __ b(slow_path->GetEntryLabel(), EQ);
    if (cls->MustGenerateClinitCheck()) {
      GenerateClassInitializationCheck(slow_path, out);
    } else {
      __ Bind(slow_path->GetExitLabel());
    }
  }
}

void LocationsBuilderARM::VisitClinitCheck(HClinitCheck* check) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(check, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  if (check->HasUses()) {
    locations->SetOut(Location::SameAsFirstInput());
  }
}

void InstructionCodeGeneratorARM::VisitClinitCheck(HClinitCheck* check) {
  // We assume the class is not null.
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadClassSlowPathARM(
      check->GetLoadClass(), check, check->GetDexPc(), true);
  codegen_->AddSlowPath(slow_path);
  GenerateClassInitializationCheck(slow_path,
                                   check->GetLocations()->InAt(0).AsRegister<Register>());
}

// Branches to `slow_path` unless the class in `class_reg` has reached the
// initialized status.
void InstructionCodeGeneratorARM::GenerateClassInitializationCheck(
    SlowPathCodeARM* slow_path, Register class_reg) {
  __ LoadFromOffset(kLoadWord, IP, class_reg, mirror::Class::StatusOffset().Int32Value());
  __ cmp(IP, ShifterOperand(mirror::Class::kStatusInitialized));
  __ b(slow_path->GetEntryLabel(), LT);
  // Even if the initialized flag is set, we may be in a situation where caches are not synced
  // properly. Therefore, we do a memory fence.
  __ dmb(ISH);
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// Static field load: same shape as the instance field load, but the base
// register holds the declaring class instead of an instance.
void InstructionCodeGeneratorARM::VisitStaticFieldGet(HStaticFieldGet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).AsRegister<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();

  switch (instruction->GetType()) {
    case Primitive::kPrimBoolean: {
      Register out = locations->Out().AsRegister<Register>();
      __ LoadFromOffset(kLoadUnsignedByte, out, cls, offset);
      break;
    }

    case Primitive::kPrimByte: {
      Register out = locations->Out().AsRegister<Register>();
      __ LoadFromOffset(kLoadSignedByte, out, cls, offset);
      break;
    }

    case Primitive::kPrimShort: {
      Register out = locations->Out().AsRegister<Register>();
      __ LoadFromOffset(kLoadSignedHalfword, out, cls, offset);
      break;
    }

    case Primitive::kPrimChar: {
      Register out = locations->Out().AsRegister<Register>();
      __ LoadFromOffset(kLoadUnsignedHalfword, out, cls, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register out = locations->Out().AsRegister<Register>();
      __ LoadFromOffset(kLoadWord, out, cls, offset);
      break;
    }

    case Primitive::kPrimLong: {
      // TODO: support volatile.
      Location out = locations->Out();
      __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), cls, offset);
      break;
    }

    case Primitive::kPrimFloat: {
      SRegister out = locations->Out().AsFpuRegister<SRegister>();
      __ LoadSFromOffset(out, cls, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister out = FromLowSToD(locations->Out().AsFpuRegisterPairLow<SRegister>());
      __ LoadDFromOffset(out, cls, offset);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}

void LocationsBuilderARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  bool needs_write_barrier =
      CodeGenerator::StoreNeedsWriteBarrier(instruction->GetFieldType(), instruction->GetValue());
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temporary registers for the write barrier.
  if (needs_write_barrier) {
    locations->AddTemp(Location::RequiresRegister());
    locations->AddTemp(Location::RequiresRegister());
  }
}

// Static field store: same shape as the instance field store, with the
// declaring class object as the base; reference stores mark a GC card.
void InstructionCodeGeneratorARM::VisitStaticFieldSet(HStaticFieldSet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register cls = locations->InAt(0).AsRegister<Register>();
  uint32_t offset = instruction->GetFieldOffset().Uint32Value();
  Primitive::Type field_type = instruction->GetFieldType();

  switch (field_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      Register value = locations->InAt(1).AsRegister<Register>();
      __ StoreToOffset(kStoreByte, value, cls, offset);
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      Register value = locations->InAt(1).AsRegister<Register>();
      __ StoreToOffset(kStoreHalfword, value, cls, offset);
      break;
    }

    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      Register value = locations->InAt(1).AsRegister<Register>();
      __ StoreToOffset(kStoreWord, value, cls, offset);
      if (CodeGenerator::StoreNeedsWriteBarrier(field_type, instruction->GetValue())) {
        Register temp = locations->GetTemp(0).AsRegister<Register>();
        Register card = locations->GetTemp(1).AsRegister<Register>();
        codegen_->MarkGCCard(temp, card, cls, value);
      }
      break;
    }

    case Primitive::kPrimLong: {
      Location value = locations->InAt(1);
      __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), cls, offset);
      break;
    }

    case Primitive::kPrimFloat: {
      SRegister value = locations->InAt(1).AsFpuRegister<SRegister>();
      __ StoreSToOffset(value, cls, offset);
      break;
    }

    case Primitive::kPrimDouble: {
      DRegister value = FromLowSToD(locations->InAt(1).AsFpuRegisterPairLow<SRegister>());
      __ StoreDToOffset(value, cls, offset);
      break;
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << field_type;
      UNREACHABLE();
  }
}

void LocationsBuilderARM::VisitLoadString(HLoadString* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kCallOnSlowPath);
  locations->SetOut(Location::RequiresRegister());
}

// Loads a String through the dex cache of the current method's declaring
// class; a null (unresolved) entry falls back to the runtime slow path.
void InstructionCodeGeneratorARM::VisitLoadString(HLoadString* load) {
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) LoadStringSlowPathARM(load);
  codegen_->AddSlowPath(slow_path);

  Register out = load->GetLocations()->Out().AsRegister<Register>();
  codegen_->LoadCurrentMethod(out);
  __ LoadFromOffset(kLoadWord, out, out, mirror::ArtMethod::DeclaringClassOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, mirror::Class::DexCacheStringsOffset().Int32Value());
  __ LoadFromOffset(kLoadWord, out, out, CodeGenerator::GetCacheOffset(load->GetStringIndex()));
  __ cmp(out, ShifterOperand(0));
  __ b(slow_path->GetEntryLabel(), EQ);
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM::VisitLoadException(HLoadException* load) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(load, LocationSummary::kNoCall);
  locations->SetOut(Location::RequiresRegister());
}

// Reads the pending exception from the current thread, then clears it by
// storing zero back to the thread-local slot.
void InstructionCodeGeneratorARM::VisitLoadException(HLoadException* load) {
  Register out = load->GetLocations()->Out().AsRegister<Register>();
  int32_t offset = Thread::ExceptionOffset<kArmWordSize>().Int32Value();
  __ LoadFromOffset(kLoadWord, out, TR, offset);
  __ LoadImmediate(IP, 0);
  __ StoreToOffset(kStoreWord, IP, TR, offset);
}

void LocationsBuilderARM::VisitThrow(HThrow* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention
      calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

void InstructionCodeGeneratorARM::VisitThrow(HThrow* instruction) {
  codegen_->InvokeRuntime(
      QUICK_ENTRY_POINT(pDeliverException), instruction, instruction->GetDexPc());
}

// A final class only needs an inline class-pointer compare; otherwise the
// mismatch case must consult the runtime via a slow path.
void LocationsBuilderARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary::CallKind call_kind = instruction->IsClassFinal()
      ? LocationSummary::kNoCall
      : LocationSummary::kCallOnSlowPath;
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(instruction, call_kind);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  locations->SetOut(Location::RequiresRegister());
}

// Materializes the instanceof result: 0 for null or class mismatch (final
// classes), 1 for a direct class match; non-final mismatches go to the
// TypeCheck slow path which computes the result.
void InstructionCodeGeneratorARM::VisitInstanceOf(HInstanceOf* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register out = locations->Out().AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();
  Label done, zero;
  SlowPathCodeARM* slow_path = nullptr;

  // Return 0 if `obj` is null.
  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(&zero, EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, out, obj, class_offset);
  __ cmp(out, ShifterOperand(cls));
  if (instruction->IsClassFinal()) {
    // Classes must be equal for the instanceof to succeed.
    __ b(&zero, NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  } else {
    // If the classes are not equal, we go into a slow path.
    DCHECK(locations->OnlyCallsOnSlowPath());
    slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
        instruction, locations->InAt(1), locations->Out(), instruction->GetDexPc());
    codegen_->AddSlowPath(slow_path);
    __ b(slow_path->GetEntryLabel(), NE);
    __ LoadImmediate(out, 1);
    __ b(&done);
  }
  __ Bind(&zero);
  __ LoadImmediate(out, 0);
  if (slow_path != nullptr) {
    __ Bind(slow_path->GetExitLabel());
  }
  __ Bind(&done);
}

void LocationsBuilderARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, LocationSummary::kCallOnSlowPath);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  // Temp holds the loaded class pointer of `obj`.
  locations->AddTemp(Location::RequiresRegister());
}

// checkcast: null always passes (branch straight to the exit label); a class
// mismatch takes the TypeCheck slow path, which throws on failure.
void InstructionCodeGeneratorARM::VisitCheckCast(HCheckCast* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).AsRegister<Register>();
  Register cls = locations->InAt(1).AsRegister<Register>();
  Register temp = locations->GetTemp(0).AsRegister<Register>();
  uint32_t class_offset = mirror::Object::ClassOffset().Int32Value();

  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) TypeCheckSlowPathARM(
      instruction, locations->InAt(1), locations->GetTemp(0), instruction->GetDexPc());
  codegen_->AddSlowPath(slow_path);

  // TODO: avoid this check if we know obj is not null.
  __ cmp(obj, ShifterOperand(0));
  __ b(slow_path->GetExitLabel(), EQ);
  // Compare the class of `obj` with `cls`.
  __ LoadFromOffset(kLoadWord, temp, obj, class_offset);
  __ cmp(temp, ShifterOperand(cls));
  __ b(slow_path->GetEntryLabel(), NE);
  __ Bind(slow_path->GetExitLabel());
}

void LocationsBuilderARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall);
  InvokeRuntimeCallingConvention calling_convention;
  locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
}

// monitorenter/monitorexit are always runtime calls.
void InstructionCodeGeneratorARM::VisitMonitorOperation(HMonitorOperation* instruction) {
  codegen_->InvokeRuntime(instruction->IsEnter()
        ? QUICK_ENTRY_POINT(pLockObject) : QUICK_ENTRY_POINT(pUnlockObject),
      instruction,
      instruction->GetDexPc());
}

void LocationsBuilderARM::VisitAnd(HAnd* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitOr(HOr* instruction) { HandleBitwiseOperation(instruction); }
void LocationsBuilderARM::VisitXor(HXor* instruction) { HandleBitwiseOperation(instruction); }

// Shared locations for AND/OR/XOR: both operands in registers; only long
// results require the output not to overlap the inputs.
void LocationsBuilderARM::HandleBitwiseOperation(HBinaryOperation* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  DCHECK(instruction->GetResultType() == Primitive::kPrimInt
         || instruction->GetResultType() == Primitive::kPrimLong);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  bool output_overlaps = (instruction->GetResultType() == Primitive::kPrimLong);
  locations->SetOut(Location::RequiresRegister(), output_overlaps);
}

void InstructionCodeGeneratorARM::VisitAnd(HAnd* instruction) {
  HandleBitwiseOperation(instruction);
}

void InstructionCodeGeneratorARM::VisitOr(HOr* instruction) {
  HandleBitwiseOperation(instruction);
}
3509 3510void InstructionCodeGeneratorARM::VisitXor(HXor* instruction) { 3511 HandleBitwiseOperation(instruction); 3512} 3513 3514void InstructionCodeGeneratorARM::HandleBitwiseOperation(HBinaryOperation* instruction) { 3515 LocationSummary* locations = instruction->GetLocations(); 3516 3517 if (instruction->GetResultType() == Primitive::kPrimInt) { 3518 Register first = locations->InAt(0).AsRegister<Register>(); 3519 Register second = locations->InAt(1).AsRegister<Register>(); 3520 Register out = locations->Out().AsRegister<Register>(); 3521 if (instruction->IsAnd()) { 3522 __ and_(out, first, ShifterOperand(second)); 3523 } else if (instruction->IsOr()) { 3524 __ orr(out, first, ShifterOperand(second)); 3525 } else { 3526 DCHECK(instruction->IsXor()); 3527 __ eor(out, first, ShifterOperand(second)); 3528 } 3529 } else { 3530 DCHECK_EQ(instruction->GetResultType(), Primitive::kPrimLong); 3531 Location first = locations->InAt(0); 3532 Location second = locations->InAt(1); 3533 Location out = locations->Out(); 3534 if (instruction->IsAnd()) { 3535 __ and_(out.AsRegisterPairLow<Register>(), 3536 first.AsRegisterPairLow<Register>(), 3537 ShifterOperand(second.AsRegisterPairLow<Register>())); 3538 __ and_(out.AsRegisterPairHigh<Register>(), 3539 first.AsRegisterPairHigh<Register>(), 3540 ShifterOperand(second.AsRegisterPairHigh<Register>())); 3541 } else if (instruction->IsOr()) { 3542 __ orr(out.AsRegisterPairLow<Register>(), 3543 first.AsRegisterPairLow<Register>(), 3544 ShifterOperand(second.AsRegisterPairLow<Register>())); 3545 __ orr(out.AsRegisterPairHigh<Register>(), 3546 first.AsRegisterPairHigh<Register>(), 3547 ShifterOperand(second.AsRegisterPairHigh<Register>())); 3548 } else { 3549 DCHECK(instruction->IsXor()); 3550 __ eor(out.AsRegisterPairLow<Register>(), 3551 first.AsRegisterPairLow<Register>(), 3552 ShifterOperand(second.AsRegisterPairLow<Register>())); 3553 __ eor(out.AsRegisterPairHigh<Register>(), 3554 first.AsRegisterPairHigh<Register>(), 3555 
ShifterOperand(second.AsRegisterPairHigh<Register>())); 3556 } 3557 } 3558} 3559 3560} // namespace arm 3561} // namespace art 3562