code_generator_arm.cc revision 2e07b4f0a84a7968b4690c2b1be2e2f75cc6fa8e
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "code_generator_arm.h"

#include "entrypoints/quick/quick_entrypoints.h"
#include "gc/accounting/card_table.h"
#include "mirror/array-inl.h"
#include "mirror/art_method.h"
#include "mirror/class.h"
#include "thread.h"
#include "utils/assembler.h"
#include "utils/arm/assembler_arm.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/stack_checks.h"

namespace art {

namespace arm {

// Returns the S register overlapping the low half of the given D register
// (S<2n> is the low single of D<n> in the VFP register bank).
static SRegister FromDToLowS(DRegister reg) {
  return static_cast<SRegister>(reg * 2);
}

// If true, stack overflow is checked with an explicit SP-vs-stack-end compare
// branching to a slow path; otherwise an implicit probe load below SP is
// emitted (see GenerateFrameEntry).
static constexpr bool kExplicitStackOverflowCheck = false;

static constexpr int kNumberOfPushedRegistersAtEntry = 1 + 2;  // LR, R6, R7
static constexpr int kCurrentMethodStackOffset = 0;

// Argument registers used when calling into the runtime (no FPU args).
static constexpr Register kRuntimeParameterCoreRegisters[] = { R0, R1, R2 };
static constexpr size_t kRuntimeParameterCoreRegistersLength =
    arraysize(kRuntimeParameterCoreRegisters);
static constexpr DRegister kRuntimeParameterFpuRegisters[] = { };
static constexpr size_t kRuntimeParameterFpuRegistersLength = 0;

// Calling convention used by slow paths when invoking runtime entrypoints.
class InvokeRuntimeCallingConvention : public CallingConvention<Register, DRegister> {
 public:
  InvokeRuntimeCallingConvention()
      : CallingConvention(kRuntimeParameterCoreRegisters,
                          kRuntimeParameterCoreRegistersLength,
                          kRuntimeParameterFpuRegisters,
                          kRuntimeParameterFpuRegistersLength) {}

 private:
  DISALLOW_COPY_AND_ASSIGN(InvokeRuntimeCallingConvention);
};

// In slow-path code, `codegen` is the CodeGenerator* parameter of EmitNativeCode.
#define __ reinterpret_cast<ArmAssembler*>(codegen->GetAssembler())->

// Base class for ARM slow paths: out-of-line code entered at `entry_label_`,
// optionally rejoining the fast path at `exit_label_`.
class SlowPathCodeARM : public SlowPathCode {
 public:
  SlowPathCodeARM() : entry_label_(), exit_label_() {}

  Label* GetEntryLabel() { return &entry_label_; }
  Label* GetExitLabel() { return &exit_label_; }

 private:
  Label entry_label_;
  Label exit_label_;

  DISALLOW_COPY_AND_ASSIGN(SlowPathCodeARM);
};

// Slow path calling the pThrowNullPointer runtime entrypoint.
class NullCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit NullCheckSlowPathARM(HNullCheck* instruction) : instruction_(instruction) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowNullPointer).Int32Value();
    __ LoadFromOffset(kLoadWord, LR, TR, offset);
    __ blx(LR);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HNullCheck* const instruction_;
  DISALLOW_COPY_AND_ASSIGN(NullCheckSlowPathARM);
};

// Slow path jumping to the pThrowStackOverflow runtime entrypoint.
// Loading directly into PC transfers control without using LR.
class StackOverflowCheckSlowPathARM : public SlowPathCodeARM {
 public:
  StackOverflowCheckSlowPathARM() {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    __ Bind(GetEntryLabel());
    __ LoadFromOffset(kLoadWord, PC, TR,
        QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowStackOverflow).Int32Value());
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(StackOverflowCheckSlowPathARM);
};

// Slow path calling pTestSuspend, saving/restoring live registers around the
// call, then branching back to either the return label or `successor_`.
class SuspendCheckSlowPathARM : public SlowPathCodeARM {
 public:
  explicit SuspendCheckSlowPathARM(HSuspendCheck* instruction, HBasicBlock* successor)
      : instruction_(instruction), successor_(successor) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    codegen->SaveLiveRegisters(instruction_->GetLocations());
    int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pTestSuspend).Int32Value();
    __ LoadFromOffset(kLoadWord, LR, TR, offset);
    __ blx(LR);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
    codegen->RestoreLiveRegisters(instruction_->GetLocations());
    if (successor_ == nullptr) {
      __ b(GetReturnLabel());
    } else {
      __ b(arm_codegen->GetLabelOf(successor_));
    }
  }

  Label* GetReturnLabel() {
    DCHECK(successor_ == nullptr);
    return &return_label_;
  }

 private:
  HSuspendCheck* const instruction_;
  // If not null, the block to branch to after the suspend check.
  HBasicBlock* const successor_;

  // If `successor_` is null, the label to branch to after the suspend check.
  Label return_label_;

  DISALLOW_COPY_AND_ASSIGN(SuspendCheckSlowPathARM);
};

// Slow path moving the failing index and length into the runtime argument
// registers, then calling the pThrowArrayBounds entrypoint.
class BoundsCheckSlowPathARM : public SlowPathCodeARM {
 public:
  BoundsCheckSlowPathARM(HBoundsCheck* instruction,
                         Location index_location,
                         Location length_location)
      : instruction_(instruction),
        index_location_(index_location),
        length_location_(length_location) {}

  virtual void EmitNativeCode(CodeGenerator* codegen) OVERRIDE {
    CodeGeneratorARM* arm_codegen = down_cast<CodeGeneratorARM*>(codegen);
    __ Bind(GetEntryLabel());
    InvokeRuntimeCallingConvention calling_convention;
    arm_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(0)), index_location_);
    arm_codegen->Move32(Location::RegisterLocation(calling_convention.GetRegisterAt(1)), length_location_);
    int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pThrowArrayBounds).Int32Value();
    __ LoadFromOffset(kLoadWord, LR, TR, offset);
    __ blx(LR);
    codegen->RecordPcInfo(instruction_, instruction_->GetDexPc());
  }

 private:
  HBoundsCheck* const instruction_;
  const Location index_location_;
  const Location length_location_;

  DISALLOW_COPY_AND_ASSIGN(BoundsCheckSlowPathARM);
};

#undef __
#define __ reinterpret_cast<ArmAssembler*>(GetAssembler())->

// Maps an HIR condition to the ARM condition code that takes the branch.
inline Condition ARMCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return EQ;
    case kCondNE: return NE;
    case kCondLT: return LT;
    case kCondLE: return LE;
    case kCondGT: return GT;
    case kCondGE: return GE;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;  // Unreachable.
}

// Maps an HIR condition to the ARM condition code of its logical negation.
inline Condition ARMOppositeCondition(IfCondition cond) {
  switch (cond) {
    case kCondEQ: return NE;
    case kCondNE: return EQ;
    case kCondLT: return GE;
    case kCondLE: return GT;
    case kCondGT: return LE;
    case kCondGE: return LT;
    default:
      LOG(FATAL) << "Unknown if condition";
  }
  return EQ;  // Unreachable.
}

void CodeGeneratorARM::DumpCoreRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromCoreRegister(Register(reg));
}

void CodeGeneratorARM::DumpFloatingPointRegister(std::ostream& stream, int reg) const {
  stream << ArmManagedRegister::FromDRegister(DRegister(reg));
}

// Spills core register `reg_id` to the stack slot at `stack_index`.
// Returns the number of bytes used.
size_t CodeGeneratorARM::SaveCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ StoreToOffset(kStoreWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}

// Reloads core register `reg_id` from the stack slot at `stack_index`.
size_t CodeGeneratorARM::RestoreCoreRegister(size_t stack_index, uint32_t reg_id) {
  __ LoadFromOffset(kLoadWord, static_cast<Register>(reg_id), SP, stack_index);
  return kArmWordSize;
}

CodeGeneratorARM::CodeGeneratorARM(HGraph* graph)
    : CodeGenerator(graph, kNumberOfCoreRegisters, kNumberOfDRegisters, kNumberOfRegisterPairs),
      block_labels_(graph->GetArena(), 0),
      location_builder_(graph, this),
      instruction_visitor_(graph, this),
      move_resolver_(graph->GetArena(), this),
      assembler_(true) {}

size_t CodeGeneratorARM::FrameEntrySpillSize() const {
  return kNumberOfPushedRegistersAtEntry * kArmWordSize;
}

// Picks a free register (or register pair for longs) for the given type and
// marks it — and any overlapping pair — as blocked.
Location CodeGeneratorARM::AllocateFreeRegister(Primitive::Type type) const {
  switch (type) {
    case Primitive::kPrimLong: {
      size_t reg = FindFreeEntry(blocked_register_pairs_, kNumberOfRegisterPairs);
      ArmManagedRegister pair =
          ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(reg));
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairLow()]);
      DCHECK(!blocked_core_registers_[pair.AsRegisterPairHigh()]);

      blocked_core_registers_[pair.AsRegisterPairLow()] = true;
      blocked_core_registers_[pair.AsRegisterPairHigh()] = true;
      UpdateBlockedPairRegisters();
      return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
    }

    case Primitive::kPrimByte:
    case Primitive::kPrimBoolean:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot: {
      int reg = FindFreeEntry(blocked_core_registers_, kNumberOfCoreRegisters);
      // Block all register pairs that contain `reg`.
      for (int i = 0; i < kNumberOfRegisterPairs; i++) {
        ArmManagedRegister current =
            ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
        if (current.AsRegisterPairLow() == reg || current.AsRegisterPairHigh() == reg) {
          blocked_register_pairs_[i] = true;
        }
      }
      return Location::RegisterLocation(reg);
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble: {
      int reg = FindFreeEntry(blocked_fpu_registers_, kNumberOfDRegisters);
      return Location::FpuRegisterLocation(reg);
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << type;
  }

  return Location();
}

// Marks registers the allocator must never hand out (SP/LR/PC, the thread and
// scratch registers, and currently-unsupported callee-saved registers).
void CodeGeneratorARM::SetupBlockedRegisters() const {
  // Don't allocate the dalvik style register pair passing.
  blocked_register_pairs_[R1_R2] = true;

  // Stack register, LR and PC are always reserved.
  blocked_core_registers_[SP] = true;
  blocked_core_registers_[LR] = true;
  blocked_core_registers_[PC] = true;

  // Reserve R4 for suspend check.
  blocked_core_registers_[R4] = true;

  // Reserve thread register.
  blocked_core_registers_[TR] = true;

  // Reserve temp register.
  blocked_core_registers_[IP] = true;

  // TODO: We currently don't use Quick's callee saved registers.
  // We always save and restore R6 and R7 to make sure we can use three
  // register pairs for long operations.
  blocked_core_registers_[R5] = true;
  blocked_core_registers_[R8] = true;
  blocked_core_registers_[R10] = true;
  blocked_core_registers_[R11] = true;

  blocked_fpu_registers_[D8] = true;
  blocked_fpu_registers_[D9] = true;
  blocked_fpu_registers_[D10] = true;
  blocked_fpu_registers_[D11] = true;
  blocked_fpu_registers_[D12] = true;
  blocked_fpu_registers_[D13] = true;
  blocked_fpu_registers_[D14] = true;
  blocked_fpu_registers_[D15] = true;

  UpdateBlockedPairRegisters();
}

// Re-derives the blocked state of every register pair from the blocked state
// of its two core halves.
void CodeGeneratorARM::UpdateBlockedPairRegisters() const {
  for (int i = 0; i < kNumberOfRegisterPairs; i++) {
    ArmManagedRegister current =
        ArmManagedRegister::FromRegisterPair(static_cast<RegisterPair>(i));
    if (blocked_core_registers_[current.AsRegisterPairLow()]
        || blocked_core_registers_[current.AsRegisterPairHigh()]) {
      blocked_register_pairs_[i] = true;
    }
  }
}

InstructionCodeGeneratorARM::InstructionCodeGeneratorARM(HGraph* graph, CodeGeneratorARM* codegen)
    : HGraphVisitor(graph),
      assembler_(codegen->GetAssembler()),
      codegen_(codegen) {}

// Emits the method prologue: optional stack-overflow check, push of
// LR/R6/R7, frame allocation, and spill of the current method (R0) at SP+0.
void CodeGeneratorARM::GenerateFrameEntry() {
  bool skip_overflow_check = IsLeafMethod() && !FrameNeedsStackCheck(GetFrameSize(), InstructionSet::kArm);
  if (!skip_overflow_check) {
    if (kExplicitStackOverflowCheck) {
      SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) StackOverflowCheckSlowPathARM();
      AddSlowPath(slow_path);

      __ LoadFromOffset(kLoadWord, IP, TR, Thread::StackEndOffset<kArmWordSize>().Int32Value());
      __ cmp(SP, ShifterOperand(IP));
      __ b(slow_path->GetEntryLabel(), CC);
    } else {
      // Implicit check: probe-load one word below the reserved region; a
      // fault here is handled by the runtime as a stack overflow.
      __ AddConstant(IP, SP, -static_cast<int32_t>(GetStackOverflowReservedBytes(kArm)));
      __ LoadFromOffset(kLoadWord, IP, IP, 0);
      RecordPcInfo(nullptr, 0);
    }
  }

  core_spill_mask_ |= (1 << LR | 1 << R6 | 1 << R7);
  __ PushList(1 << LR | 1 << R6 | 1 << R7);

  // The return PC has already been pushed on the stack.
  __ AddConstant(SP, -(GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize));
  __ StoreToOffset(kStoreWord, R0, SP, 0);
}

// Emits the method epilogue: frame deallocation and return via popping the
// saved LR directly into PC.
void CodeGeneratorARM::GenerateFrameExit() {
  __ AddConstant(SP, GetFrameSize() - kNumberOfPushedRegistersAtEntry * kArmWordSize);
  __ PopList(1 << PC | 1 << R6 | 1 << R7);
}

void CodeGeneratorARM::Bind(HBasicBlock* block) {
  __ Bind(GetLabelOf(block));
}

// Returns the stack location (single or double slot) of a local variable.
Location CodeGeneratorARM::GetStackLocation(HLoadLocal* load) const {
  switch (load->GetType()) {
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return Location::DoubleStackSlot(GetStackSlot(load->GetLocal()));
      break;

    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      return Location::StackSlot(GetStackSlot(load->GetLocal()));

    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected type " << load->GetType();
  }

  LOG(FATAL) << "Unreachable";
  return Location();
}

// Assigns the next argument of the given type to a register, register pair,
// quick-parameter slot, or stack slot, per the dex calling convention.
// Note: floats are passed in core registers here (no FPU argument passing).
Location InvokeDexCallingConventionVisitor::GetNextLocation(Primitive::Type type) {
  switch (type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
    case Primitive::kPrimNot: {
      uint32_t index = gp_index_++;
      if (index < calling_convention.GetNumberOfRegisters()) {
        return Location::RegisterLocation(calling_convention.GetRegisterAt(index));
      } else {
        return Location::StackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble: {
      uint32_t index = gp_index_;
      gp_index_ += 2;
      if (index + 1 < calling_convention.GetNumberOfRegisters()) {
        ArmManagedRegister pair = ArmManagedRegister::FromRegisterPair(
            calling_convention.GetRegisterPairAt(index));
        return Location::RegisterPairLocation(pair.AsRegisterPairLow(), pair.AsRegisterPairHigh());
      } else if (index + 1 == calling_convention.GetNumberOfRegisters()) {
        // Value straddles the last register and the first stack slot.
        return Location::QuickParameter(index);
      } else {
        return Location::DoubleStackSlot(calling_convention.GetStackOffsetOf(index));
      }
    }

    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unexpected parameter type " << type;
      break;
  }
  return Location();
}

// Emits a 32-bit move between any combination of core register, FPU register
// (low S of a D), and stack slot. IP is used as scratch for slot-to-slot.
void CodeGeneratorARM::Move32(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      __ Mov(destination.As<Register>(), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovrs(destination.As<Register>(), FromDToLowS(source.As<DRegister>()));
    } else {
      __ LoadFromOffset(kLoadWord, destination.As<Register>(), SP, source.GetStackIndex());
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsRegister()) {
      __ vmovsr(FromDToLowS(destination.As<DRegister>()), source.As<Register>());
    } else if (source.IsFpuRegister()) {
      __ vmovs(FromDToLowS(destination.As<DRegister>()), FromDToLowS(source.As<DRegister>()));
    } else {
      __ vldrs(FromDToLowS(destination.As<DRegister>()), Address(SP, source.GetStackIndex()));
    }
  } else {
    DCHECK(destination.IsStackSlot());
    if (source.IsRegister()) {
      __ StoreToOffset(kStoreWord, source.As<Register>(), SP, destination.GetStackIndex());
    } else if (source.IsFpuRegister()) {
      __ vstrs(FromDToLowS(source.As<DRegister>()), Address(SP, destination.GetStackIndex()));
    } else {
      DCHECK(source.IsStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}

// Emits a 64-bit move between register pairs, D registers, quick-parameter
// locations (half register / half caller stack), and double stack slots.
// FPU<->register-pair combinations are not implemented yet.
void CodeGeneratorARM::Move64(Location destination, Location source) {
  if (source.Equals(destination)) {
    return;
  }
  if (destination.IsRegisterPair()) {
    if (source.IsRegisterPair()) {
      __ Mov(destination.AsRegisterPairLow<Register>(), source.AsRegisterPairLow<Register>());
      __ Mov(destination.AsRegisterPairHigh<Register>(), source.AsRegisterPairHigh<Register>());
    } else if (source.IsFpuRegister()) {
      LOG(FATAL) << "Unimplemented";
    } else if (source.IsQuickParameter()) {
      // Low half is in an argument register, high half in the caller's frame.
      uint32_t argument_index = source.GetQuickParameterIndex();
      InvokeDexCallingConvention calling_convention;
      __ Mov(destination.AsRegisterPairLow<Register>(),
             calling_convention.GetRegisterAt(argument_index));
      __ LoadFromOffset(kLoadWord, destination.AsRegisterPairHigh<Register>(),
             SP, calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize());
    } else {
      DCHECK(source.IsDoubleStackSlot());
      if (destination.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 cannot use ldrd (kLoadWordPair); load the halves separately.
        DCHECK_EQ(destination.AsRegisterPairHigh<Register>(), R2);
        __ LoadFromOffset(kLoadWord, R1, SP, source.GetStackIndex());
        __ LoadFromOffset(kLoadWord, R2, SP, source.GetHighStackIndex(kArmWordSize));
      } else {
        __ LoadFromOffset(kLoadWordPair, destination.AsRegisterPairLow<Register>(),
                          SP, source.GetStackIndex());
      }
    }
  } else if (destination.IsFpuRegister()) {
    if (source.IsDoubleStackSlot()) {
      __ vldrd(destination.As<DRegister>(), Address(SP, source.GetStackIndex()));
    } else {
      LOG(FATAL) << "Unimplemented";
    }
  } else if (destination.IsQuickParameter()) {
    InvokeDexCallingConvention calling_convention;
    uint32_t argument_index = destination.GetQuickParameterIndex();
    if (source.IsRegisterPair()) {
      __ Mov(calling_convention.GetRegisterAt(argument_index),
             source.AsRegisterPairLow<Register>());
      __ StoreToOffset(kStoreWord, source.AsRegisterPairHigh<Register>(),
             SP, calling_convention.GetStackOffsetOf(argument_index + 1));
    } else if (source.IsFpuRegister()) {
      LOG(FATAL) << "Unimplemented";
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ LoadFromOffset(kLoadWord, calling_convention.GetRegisterAt(argument_index), SP, source.GetStackIndex());
      // R0 is used as scratch for the high half here.
      __ LoadFromOffset(kLoadWord, R0, SP, source.GetHighStackIndex(kArmWordSize));
      __ StoreToOffset(kStoreWord, R0, SP, calling_convention.GetStackOffsetOf(argument_index + 1));
    }
  } else {
    DCHECK(destination.IsDoubleStackSlot());
    if (source.IsRegisterPair()) {
      if (source.AsRegisterPairLow<Register>() == R1) {
        // R1_R2 cannot use strd (kStoreWordPair); store the halves separately.
        DCHECK_EQ(source.AsRegisterPairHigh<Register>(), R2);
        __ StoreToOffset(kStoreWord, R1, SP, destination.GetStackIndex());
        __ StoreToOffset(kStoreWord, R2, SP, destination.GetHighStackIndex(kArmWordSize));
      } else {
        __ StoreToOffset(kStoreWordPair, source.AsRegisterPairLow<Register>(),
                         SP, destination.GetStackIndex());
      }
    } else if (source.IsQuickParameter()) {
      InvokeDexCallingConvention calling_convention;
      uint32_t argument_index = source.GetQuickParameterIndex();
      __ StoreToOffset(kStoreWord, calling_convention.GetRegisterAt(argument_index),
                       SP, destination.GetStackIndex());
      __ LoadFromOffset(kLoadWord, R0,
                        SP, calling_convention.GetStackOffsetOf(argument_index + 1) + GetFrameSize());
      __ StoreToOffset(kStoreWord, R0, SP, destination.GetHighStackIndex(kArmWordSize));
    } else if (source.IsFpuRegister()) {
      __ vstrd(source.As<DRegister>(), Address(SP, destination.GetStackIndex()));
    } else {
      DCHECK(source.IsDoubleStackSlot());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetHighStackIndex(kArmWordSize));
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetHighStackIndex(kArmWordSize));
    }
  }
}

// Materializes `instruction` (constant, local load, or the output of the
// previous instruction) into `location` on behalf of `move_for`.
void CodeGeneratorARM::Move(HInstruction* instruction, Location location, HInstruction* move_for) {
  LocationSummary* locations = instruction->GetLocations();
  if (locations != nullptr && locations->Out().Equals(location)) {
    return;
  }

  if (instruction->IsIntConstant()) {
    int32_t value = instruction->AsIntConstant()->GetValue();
    if (location.IsRegister()) {
      __ LoadImmediate(location.As<Register>(), value);
    } else {
      DCHECK(location.IsStackSlot());
      __ LoadImmediate(IP, value);
      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
    }
  } else if (instruction->IsLongConstant()) {
    int64_t value = instruction->AsLongConstant()->GetValue();
    if (location.IsRegisterPair()) {
      __ LoadImmediate(location.AsRegisterPairLow<Register>(), Low32Bits(value));
      __ LoadImmediate(location.AsRegisterPairHigh<Register>(), High32Bits(value));
    } else {
      DCHECK(location.IsDoubleStackSlot());
      __ LoadImmediate(IP, Low32Bits(value));
      __ StoreToOffset(kStoreWord, IP, SP, location.GetStackIndex());
      __ LoadImmediate(IP, High32Bits(value));
      __ StoreToOffset(kStoreWord, IP, SP, location.GetHighStackIndex(kArmWordSize));
    }
  } else if (instruction->IsLoadLocal()) {
    uint32_t stack_slot = GetStackSlot(instruction->AsLoadLocal()->GetLocal());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        Move32(location, Location::StackSlot(stack_slot));
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, Location::DoubleStackSlot(stack_slot));
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  } else {
    DCHECK((instruction->GetNext() == move_for) || instruction->GetNext()->IsTemporary());
    switch (instruction->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimNot:
      case Primitive::kPrimInt:
      case Primitive::kPrimFloat:
        Move32(location, locations->Out());
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        Move64(location, locations->Out());
        break;

      default:
        LOG(FATAL) << "Unexpected type " << instruction->GetType();
    }
  }
}

void LocationsBuilderARM::VisitGoto(HGoto* got) {
  got->SetLocations(nullptr);
}

// Emits a goto, folding a pending suspend check into the branch when the goto
// is a loop back edge, and eliding the branch to a fall-through successor.
void InstructionCodeGeneratorARM::VisitGoto(HGoto* got) {
  HBasicBlock* successor = got->GetSuccessor();
  DCHECK(!successor->IsExitBlock());

  HBasicBlock* block = got->GetBlock();
  HInstruction* previous = got->GetPrevious();

  HLoopInformation* info = block->GetLoopInformation();
  if (info != nullptr && info->IsBackEdge(block) && info->HasSuspendCheck()) {
    codegen_->ClearSpillSlotsFromLoopPhisInStackMap(info->GetSuspendCheck());
    GenerateSuspendCheck(info->GetSuspendCheck(), successor);
    return;
  }

  if (block->IsEntryBlock() && (previous != nullptr) && previous->IsSuspendCheck()) {
    GenerateSuspendCheck(previous->AsSuspendCheck(), nullptr);
  }
  if (!codegen_->GoesToNextBlock(got->GetBlock(), successor)) {
    __ b(codegen_->GetLabelOf(successor));
  }
}

void LocationsBuilderARM::VisitExit(HExit* exit) {
  exit->SetLocations(nullptr);
}
// The exit block is never reached at runtime; trap in debug builds.
void InstructionCodeGeneratorARM::VisitExit(HExit* exit) {
  if (kIsDebugBuild) {
    __ Comment("Unreachable");
    __ bkpt(0);
  }
}

void LocationsBuilderARM::VisitIf(HIf* if_instr) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(if_instr, LocationSummary::kNoCall);
  HInstruction* cond = if_instr->InputAt(0);
  // Only a materialized condition needs an input register; a non-materialized
  // HCondition is consumed directly through its own inputs below.
  if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
    locations->SetInAt(0, Location::RequiresRegister());
  }
}

// Emits the branch(es) for an if: constant conditions fold to a single jump,
// materialized conditions compare against 0, and non-materialized conditions
// emit the compare inline and branch on its condition code.
void InstructionCodeGeneratorARM::VisitIf(HIf* if_instr) {
  HInstruction* cond = if_instr->InputAt(0);
  if (cond->IsIntConstant()) {
    // Constant condition, statically compared against 1.
    int32_t cond_value = cond->AsIntConstant()->GetValue();
    if (cond_value == 1) {
      if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                     if_instr->IfTrueSuccessor())) {
        __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()));
      }
      return;
    } else {
      DCHECK_EQ(cond_value, 0);
    }
  } else {
    if (!cond->IsCondition() || cond->AsCondition()->NeedsMaterialization()) {
      // Condition has been materialized, compare the output to 0.
      DCHECK(if_instr->GetLocations()->InAt(0).IsRegister());
      __ cmp(if_instr->GetLocations()->InAt(0).As<Register>(),
             ShifterOperand(0));
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()), NE);
    } else {
      // Condition has not been materialized, use its inputs as the
      // comparison and its condition as the branch condition.
      LocationSummary* locations = cond->GetLocations();
      if (locations->InAt(1).IsRegister()) {
        __ cmp(locations->InAt(0).As<Register>(),
               ShifterOperand(locations->InAt(1).As<Register>()));
      } else {
        DCHECK(locations->InAt(1).IsConstant());
        int32_t value =
            locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
        ShifterOperand operand;
        if (ShifterOperand::CanHoldArm(value, &operand)) {
          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
        } else {
          // Immediate not encodable as an ARM operand: go through IP.
          Register temp = IP;
          __ LoadImmediate(temp, value);
          __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
        }
      }
      __ b(codegen_->GetLabelOf(if_instr->IfTrueSuccessor()),
           ARMCondition(cond->AsCondition()->GetCondition()));
    }
  }
  if (!codegen_->GoesToNextBlock(if_instr->GetBlock(),
                                 if_instr->IfFalseSuccessor())) {
    __ b(codegen_->GetLabelOf(if_instr->IfFalseSuccessor()));
  }
}


void LocationsBuilderARM::VisitCondition(HCondition* comp) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(comp, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RegisterOrConstant(comp->InputAt(1)));
  if (comp->NeedsMaterialization()) {
    locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
  }
}

// Materializes a condition as 0/1 in the output register using an IT block
// (conditional mov 1 / else mov 0). No code if not materialized.
void InstructionCodeGeneratorARM::VisitCondition(HCondition* comp) {
  if (!comp->NeedsMaterialization()) return;

  LocationSummary* locations = comp->GetLocations();
  if (locations->InAt(1).IsRegister()) {
    __ cmp(locations->InAt(0).As<Register>(),
           ShifterOperand(locations->InAt(1).As<Register>()));
  } else {
    DCHECK(locations->InAt(1).IsConstant());
    int32_t value = locations->InAt(1).GetConstant()->AsIntConstant()->GetValue();
    ShifterOperand operand;
    if (ShifterOperand::CanHoldArm(value, &operand)) {
      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(value));
    } else {
      // Immediate not encodable as an ARM operand: go through IP.
      Register temp = IP;
      __ LoadImmediate(temp, value);
      __ cmp(locations->InAt(0).As<Register>(), ShifterOperand(temp));
    }
  }
  __ it(ARMCondition(comp->GetCondition()), kItElse);
  __ mov(locations->Out().As<Register>(), ShifterOperand(1),
         ARMCondition(comp->GetCondition()));
  __ mov(locations->Out().As<Register>(), ShifterOperand(0),
         ARMOppositeCondition(comp->GetCondition()));
}

// The six comparison visitors all share the HCondition handling above.
void LocationsBuilderARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitEqual(HEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitNotEqual(HNotEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThan(HLessThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitLessThanOrEqual(HLessThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThan(HGreaterThan* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void InstructionCodeGeneratorARM::VisitGreaterThanOrEqual(HGreaterThanOrEqual* comp) {
  VisitCondition(comp);
}

void LocationsBuilderARM::VisitLocal(HLocal* local) {
  local->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLocal(HLocal* local) {
  DCHECK_EQ(local->GetBlock(), GetGraph()->GetEntryBlock());
}

void LocationsBuilderARM::VisitLoadLocal(HLoadLocal* load) {
  load->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitLoadLocal(HLoadLocal* load) {
  // Nothing to do, this is driven by the code generator.
}

// A store to a local takes its value (input 1) directly in the local's
// stack slot, so the store itself emits no code.
void LocationsBuilderARM::VisitStoreLocal(HStoreLocal* store) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(store, LocationSummary::kNoCall);
  switch (store->InputAt(1)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(1, Location::StackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(1, Location::DoubleStackSlot(codegen_->GetStackSlot(store->GetLocal())));
      break;

    default:
      LOG(FATAL) << "Unexpected local type " << store->InputAt(1)->GetType();
  }
}

void InstructionCodeGeneratorARM::VisitStoreLocal(HStoreLocal* store) {
  // Nothing to do, the value was already placed in the local's stack slot.
}

void LocationsBuilderARM::VisitIntConstant(HIntConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitIntConstant(HIntConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderARM::VisitLongConstant(HLongConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitLongConstant(HLongConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderARM::VisitFloatConstant(HFloatConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitFloatConstant(HFloatConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderARM::VisitDoubleConstant(HDoubleConstant* constant) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(constant, LocationSummary::kNoCall);
  locations->SetOut(Location::ConstantLocation(constant));
}

void InstructionCodeGeneratorARM::VisitDoubleConstant(HDoubleConstant* constant) {
  // Will be generated at use site.
}

void LocationsBuilderARM::VisitReturnVoid(HReturnVoid* ret) {
  ret->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitReturnVoid(HReturnVoid* ret) {
  codegen_->GenerateFrameExit();
}

// Return values go in R0 (32-bit, including float) or R0/R1 (64-bit,
// including double) — no FPU return registers are used here.
void LocationsBuilderARM::VisitReturn(HReturn* ret) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(ret, LocationSummary::kNoCall);
  switch (ret->InputAt(0)->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetInAt(0, Location::RegisterLocation(R0));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetInAt(0, Location::RegisterPairLocation(R0, R1));
      break;

    default:
      LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
  }
}

void InstructionCodeGeneratorARM::VisitReturn(HReturn* ret) {
  if (kIsDebugBuild) {
    // The register allocator must have placed the return value in R0 / R0:R1.
    switch (ret->InputAt(0)->GetType()) {
      case Primitive::kPrimBoolean:
      case Primitive::kPrimByte:
      case Primitive::kPrimChar:
      case Primitive::kPrimShort:
      case Primitive::kPrimInt:
      case Primitive::kPrimNot:
      case Primitive::kPrimFloat:
        DCHECK_EQ(ret->GetLocations()->InAt(0).As<Register>(), R0);
        break;

      case Primitive::kPrimLong:
      case Primitive::kPrimDouble:
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairLow<Register>(), R0);
        DCHECK_EQ(ret->GetLocations()->InAt(0).AsRegisterPairHigh<Register>(), R1);
        break;

      default:
        LOG(FATAL) << "Unimplemented return type " << ret->InputAt(0)->GetType();
    }
  }
  codegen_->GenerateFrameExit();
}

void LocationsBuilderARM::VisitInvokeStatic(HInvokeStatic* invoke) {
  HandleInvoke(invoke);
}

// Loads the ArtMethod* of the method being compiled from its frame slot.
void InstructionCodeGeneratorARM::LoadCurrentMethod(Register reg) {
  __ LoadFromOffset(kLoadWord, reg, SP, kCurrentMethodStackOffset);
}

// Emits a static call: resolve the callee through the current method's
// dex cache, then call its quick-compiled entrypoint.
void InstructionCodeGeneratorARM::VisitInvokeStatic(HInvokeStatic* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t heap_reference_size = sizeof(mirror::HeapReference<mirror::Object>);
  size_t index_in_cache = mirror::Array::DataOffset(heap_reference_size).Int32Value() +
      invoke->GetIndexInDexCache() * kArmWordSize;

  // TODO: Implement all kinds of calls:
  // 1) boot -> boot
  // 2) app -> boot
  // 3) app -> app
  //
  // Currently we implement the app -> app logic, which looks up in the resolve cache.

  // temp = method;
  LoadCurrentMethod(temp);
  // temp = temp->dex_cache_resolved_methods_;
  __ LoadFromOffset(kLoadWord, temp, temp, mirror::ArtMethod::DexCacheResolvedMethodsOffset().Int32Value());
  // temp = temp[index_in_cache]
  __ LoadFromOffset(kLoadWord, temp, temp, index_in_cache);
  // LR = temp[offset_of_quick_compiled_code]
  __ LoadFromOffset(kLoadWord, LR, temp,
                    mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value());
  // Call through LR.
  __ blx(LR);

  codegen_->RecordPcInfo(invoke, invoke->GetDexPc());
  DCHECK(!codegen_->IsLeafMethod());
}

void LocationsBuilderARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  HandleInvoke(invoke);
}

// Shared location setup for invokes: R0 is a temp (holds the resolved
// method), arguments follow the dex calling convention, and the result goes
// in R0 / R0:R1.
void LocationsBuilderARM::HandleInvoke(HInvoke* invoke) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(invoke, LocationSummary::kCall);
  locations->AddTemp(Location::RegisterLocation(R0));

  InvokeDexCallingConventionVisitor calling_convention_visitor;
  for (size_t i = 0; i < invoke->InputCount(); i++) {
    HInstruction* input = invoke->InputAt(i);
    locations->SetInAt(i, calling_convention_visitor.GetNextLocation(input->GetType()));
  }

  switch (invoke->GetType()) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte:
    case Primitive::kPrimChar:
    case Primitive::kPrimShort:
    case Primitive::kPrimInt:
    case Primitive::kPrimNot:
    case Primitive::kPrimFloat:
      locations->SetOut(Location::RegisterLocation(R0));
      break;

    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      locations->SetOut(Location::RegisterPairLocation(R0, R1));
      break;

    case Primitive::kPrimVoid:
      break;
  }
}


void InstructionCodeGeneratorARM::VisitInvokeVirtual(HInvokeVirtual* invoke) {
  Register temp = invoke->GetLocations()->GetTemp(0).As<Register>();
  uint32_t method_offset =
mirror::Class::EmbeddedVTableOffset().Uint32Value() + 1019 invoke->GetVTableIndex() * sizeof(mirror::Class::VTableEntry); 1020 LocationSummary* locations = invoke->GetLocations(); 1021 Location receiver = locations->InAt(0); 1022 uint32_t class_offset = mirror::Object::ClassOffset().Int32Value(); 1023 // temp = object->GetClass(); 1024 if (receiver.IsStackSlot()) { 1025 __ LoadFromOffset(kLoadWord, temp, SP, receiver.GetStackIndex()); 1026 __ LoadFromOffset(kLoadWord, temp, temp, class_offset); 1027 } else { 1028 __ LoadFromOffset(kLoadWord, temp, receiver.As<Register>(), class_offset); 1029 } 1030 // temp = temp->GetMethodAt(method_offset); 1031 uint32_t entry_point = mirror::ArtMethod::EntryPointFromQuickCompiledCodeOffset().Int32Value(); 1032 __ LoadFromOffset(kLoadWord, temp, temp, method_offset); 1033 // LR = temp->GetEntryPoint(); 1034 __ LoadFromOffset(kLoadWord, LR, temp, entry_point); 1035 // LR(); 1036 __ blx(LR); 1037 DCHECK(!codegen_->IsLeafMethod()); 1038 codegen_->RecordPcInfo(invoke, invoke->GetDexPc()); 1039} 1040 1041void LocationsBuilderARM::VisitNeg(HNeg* neg) { 1042 LocationSummary* locations = 1043 new (GetGraph()->GetArena()) LocationSummary(neg, LocationSummary::kNoCall); 1044 switch (neg->GetResultType()) { 1045 case Primitive::kPrimInt: 1046 case Primitive::kPrimLong: { 1047 bool output_overlaps = (neg->GetResultType() == Primitive::kPrimLong); 1048 locations->SetInAt(0, Location::RequiresRegister()); 1049 locations->SetOut(Location::RequiresRegister(), output_overlaps); 1050 break; 1051 } 1052 1053 case Primitive::kPrimFloat: 1054 case Primitive::kPrimDouble: 1055 LOG(FATAL) << "Not yet implemented neg type " << neg->GetResultType(); 1056 break; 1057 1058 default: 1059 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType(); 1060 } 1061} 1062 1063void InstructionCodeGeneratorARM::VisitNeg(HNeg* neg) { 1064 LocationSummary* locations = neg->GetLocations(); 1065 Location out = locations->Out(); 1066 Location in = locations->InAt(0); 
1067 switch (neg->GetResultType()) { 1068 case Primitive::kPrimInt: 1069 DCHECK(in.IsRegister()); 1070 __ rsb(out.As<Register>(), in.As<Register>(), ShifterOperand(0)); 1071 break; 1072 1073 case Primitive::kPrimLong: 1074 DCHECK(in.IsRegisterPair()); 1075 // out.lo = 0 - in.lo (and update the carry/borrow (C) flag) 1076 __ rsbs(out.AsRegisterPairLow<Register>(), 1077 in.AsRegisterPairLow<Register>(), 1078 ShifterOperand(0)); 1079 // We cannot emit an RSC (Reverse Subtract with Carry) 1080 // instruction here, as it does not exist in the Thumb-2 1081 // instruction set. We use the following approach 1082 // using SBC and SUB instead. 1083 // 1084 // out.hi = -C 1085 __ sbc(out.AsRegisterPairHigh<Register>(), 1086 out.AsRegisterPairHigh<Register>(), 1087 ShifterOperand(out.AsRegisterPairHigh<Register>())); 1088 // out.hi = out.hi - in.hi 1089 __ sub(out.AsRegisterPairHigh<Register>(), 1090 out.AsRegisterPairHigh<Register>(), 1091 ShifterOperand(in.AsRegisterPairHigh<Register>())); 1092 break; 1093 1094 case Primitive::kPrimFloat: 1095 case Primitive::kPrimDouble: 1096 LOG(FATAL) << "Not yet implemented neg type " << neg->GetResultType(); 1097 break; 1098 1099 default: 1100 LOG(FATAL) << "Unexpected neg type " << neg->GetResultType(); 1101 } 1102} 1103 1104void LocationsBuilderARM::VisitAdd(HAdd* add) { 1105 LocationSummary* locations = 1106 new (GetGraph()->GetArena()) LocationSummary(add, LocationSummary::kNoCall); 1107 switch (add->GetResultType()) { 1108 case Primitive::kPrimInt: 1109 case Primitive::kPrimLong: { 1110 bool output_overlaps = (add->GetResultType() == Primitive::kPrimLong); 1111 locations->SetInAt(0, Location::RequiresRegister()); 1112 locations->SetInAt(1, Location::RegisterOrConstant(add->InputAt(1))); 1113 locations->SetOut(Location::RequiresRegister(), output_overlaps); 1114 break; 1115 } 1116 1117 case Primitive::kPrimFloat: 1118 case Primitive::kPrimDouble: { 1119 locations->SetInAt(0, Location::RequiresFpuRegister()); 1120 
locations->SetInAt(1, Location::RequiresFpuRegister()); 1121 locations->SetOut(Location::RequiresFpuRegister()); 1122 break; 1123 } 1124 1125 default: 1126 LOG(FATAL) << "Unexpected add type " << add->GetResultType(); 1127 } 1128} 1129 1130void InstructionCodeGeneratorARM::VisitAdd(HAdd* add) { 1131 LocationSummary* locations = add->GetLocations(); 1132 Location out = locations->Out(); 1133 Location first = locations->InAt(0); 1134 Location second = locations->InAt(1); 1135 switch (add->GetResultType()) { 1136 case Primitive::kPrimInt: 1137 if (second.IsRegister()) { 1138 __ add(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>())); 1139 } else { 1140 __ AddConstant(out.As<Register>(), 1141 first.As<Register>(), 1142 second.GetConstant()->AsIntConstant()->GetValue()); 1143 } 1144 break; 1145 1146 case Primitive::kPrimLong: 1147 __ adds(out.AsRegisterPairLow<Register>(), 1148 first.AsRegisterPairLow<Register>(), 1149 ShifterOperand(second.AsRegisterPairLow<Register>())); 1150 __ adc(out.AsRegisterPairHigh<Register>(), 1151 first.AsRegisterPairHigh<Register>(), 1152 ShifterOperand(second.AsRegisterPairHigh<Register>())); 1153 break; 1154 1155 case Primitive::kPrimFloat: 1156 __ vadds(FromDToLowS(out.As<DRegister>()), 1157 FromDToLowS(first.As<DRegister>()), 1158 FromDToLowS(second.As<DRegister>())); 1159 break; 1160 1161 case Primitive::kPrimDouble: 1162 __ vaddd(out.As<DRegister>(), first.As<DRegister>(), second.As<DRegister>()); 1163 break; 1164 1165 default: 1166 LOG(FATAL) << "Unexpected add type " << add->GetResultType(); 1167 } 1168} 1169 1170void LocationsBuilderARM::VisitSub(HSub* sub) { 1171 LocationSummary* locations = 1172 new (GetGraph()->GetArena()) LocationSummary(sub, LocationSummary::kNoCall); 1173 switch (sub->GetResultType()) { 1174 case Primitive::kPrimInt: 1175 case Primitive::kPrimLong: { 1176 bool output_overlaps = (sub->GetResultType() == Primitive::kPrimLong); 1177 locations->SetInAt(0, Location::RequiresRegister()); 
1178 locations->SetInAt(1, Location::RegisterOrConstant(sub->InputAt(1))); 1179 locations->SetOut(Location::RequiresRegister(), output_overlaps); 1180 break; 1181 } 1182 case Primitive::kPrimFloat: 1183 case Primitive::kPrimDouble: { 1184 locations->SetInAt(0, Location::RequiresFpuRegister()); 1185 locations->SetInAt(1, Location::RequiresFpuRegister()); 1186 locations->SetOut(Location::RequiresFpuRegister()); 1187 break; 1188 } 1189 default: 1190 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType(); 1191 } 1192} 1193 1194void InstructionCodeGeneratorARM::VisitSub(HSub* sub) { 1195 LocationSummary* locations = sub->GetLocations(); 1196 Location out = locations->Out(); 1197 Location first = locations->InAt(0); 1198 Location second = locations->InAt(1); 1199 switch (sub->GetResultType()) { 1200 case Primitive::kPrimInt: { 1201 if (second.IsRegister()) { 1202 __ sub(out.As<Register>(), first.As<Register>(), ShifterOperand(second.As<Register>())); 1203 } else { 1204 __ AddConstant(out.As<Register>(), 1205 first.As<Register>(), 1206 -second.GetConstant()->AsIntConstant()->GetValue()); 1207 } 1208 break; 1209 } 1210 1211 case Primitive::kPrimLong: { 1212 __ subs(out.AsRegisterPairLow<Register>(), 1213 first.AsRegisterPairLow<Register>(), 1214 ShifterOperand(second.AsRegisterPairLow<Register>())); 1215 __ sbc(out.AsRegisterPairHigh<Register>(), 1216 first.AsRegisterPairHigh<Register>(), 1217 ShifterOperand(second.AsRegisterPairHigh<Register>())); 1218 break; 1219 } 1220 1221 case Primitive::kPrimFloat: { 1222 __ vsubs(FromDToLowS(out.As<DRegister>()), 1223 FromDToLowS(first.As<DRegister>()), 1224 FromDToLowS(second.As<DRegister>())); 1225 break; 1226 } 1227 1228 case Primitive::kPrimDouble: { 1229 __ vsubd(out.As<DRegister>(), first.As<DRegister>(), second.As<DRegister>()); 1230 break; 1231 } 1232 1233 1234 default: 1235 LOG(FATAL) << "Unexpected sub type " << sub->GetResultType(); 1236 } 1237} 1238 1239void LocationsBuilderARM::VisitMul(HMul* mul) { 1240 
LocationSummary* locations = 1241 new (GetGraph()->GetArena()) LocationSummary(mul, LocationSummary::kNoCall); 1242 switch (mul->GetResultType()) { 1243 case Primitive::kPrimInt: 1244 case Primitive::kPrimLong: { 1245 locations->SetInAt(0, Location::RequiresRegister()); 1246 locations->SetInAt(1, Location::RequiresRegister()); 1247 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1248 break; 1249 } 1250 1251 case Primitive::kPrimFloat: 1252 case Primitive::kPrimDouble: { 1253 locations->SetInAt(0, Location::RequiresFpuRegister()); 1254 locations->SetInAt(1, Location::RequiresFpuRegister()); 1255 locations->SetOut(Location::RequiresFpuRegister()); 1256 break; 1257 } 1258 1259 default: 1260 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType(); 1261 } 1262} 1263 1264void InstructionCodeGeneratorARM::VisitMul(HMul* mul) { 1265 LocationSummary* locations = mul->GetLocations(); 1266 Location out = locations->Out(); 1267 Location first = locations->InAt(0); 1268 Location second = locations->InAt(1); 1269 switch (mul->GetResultType()) { 1270 case Primitive::kPrimInt: { 1271 __ mul(out.As<Register>(), first.As<Register>(), second.As<Register>()); 1272 break; 1273 } 1274 case Primitive::kPrimLong: { 1275 Register out_hi = out.AsRegisterPairHigh<Register>(); 1276 Register out_lo = out.AsRegisterPairLow<Register>(); 1277 Register in1_hi = first.AsRegisterPairHigh<Register>(); 1278 Register in1_lo = first.AsRegisterPairLow<Register>(); 1279 Register in2_hi = second.AsRegisterPairHigh<Register>(); 1280 Register in2_lo = second.AsRegisterPairLow<Register>(); 1281 1282 // Extra checks to protect caused by the existence of R1_R2. 1283 // The algorithm is wrong if out.hi is either in1.lo or in2.lo: 1284 // (e.g. 
in1=r0_r1, in2=r2_r3 and out=r1_r2); 1285 DCHECK_NE(out_hi, in1_lo); 1286 DCHECK_NE(out_hi, in2_lo); 1287 1288 // input: in1 - 64 bits, in2 - 64 bits 1289 // output: out 1290 // formula: out.hi : out.lo = (in1.lo * in2.hi + in1.hi * in2.lo)* 2^32 + in1.lo * in2.lo 1291 // parts: out.hi = in1.lo * in2.hi + in1.hi * in2.lo + (in1.lo * in2.lo)[63:32] 1292 // parts: out.lo = (in1.lo * in2.lo)[31:0] 1293 1294 // IP <- in1.lo * in2.hi 1295 __ mul(IP, in1_lo, in2_hi); 1296 // out.hi <- in1.lo * in2.hi + in1.hi * in2.lo 1297 __ mla(out_hi, in1_hi, in2_lo, IP); 1298 // out.lo <- (in1.lo * in2.lo)[31:0]; 1299 __ umull(out_lo, IP, in1_lo, in2_lo); 1300 // out.hi <- in2.hi * in1.lo + in2.lo * in1.hi + (in1.lo * in2.lo)[63:32] 1301 __ add(out_hi, out_hi, ShifterOperand(IP)); 1302 break; 1303 } 1304 1305 case Primitive::kPrimFloat: { 1306 __ vmuls(FromDToLowS(out.As<DRegister>()), 1307 FromDToLowS(first.As<DRegister>()), 1308 FromDToLowS(second.As<DRegister>())); 1309 break; 1310 } 1311 1312 case Primitive::kPrimDouble: { 1313 __ vmuld(out.As<DRegister>(), first.As<DRegister>(), second.As<DRegister>()); 1314 break; 1315 } 1316 1317 default: 1318 LOG(FATAL) << "Unexpected mul type " << mul->GetResultType(); 1319 } 1320} 1321 1322void LocationsBuilderARM::VisitNewInstance(HNewInstance* instruction) { 1323 LocationSummary* locations = 1324 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall); 1325 InvokeRuntimeCallingConvention calling_convention; 1326 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0))); 1327 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 1328 locations->SetOut(Location::RegisterLocation(R0)); 1329} 1330 1331void InstructionCodeGeneratorARM::VisitNewInstance(HNewInstance* instruction) { 1332 InvokeRuntimeCallingConvention calling_convention; 1333 LoadCurrentMethod(calling_convention.GetRegisterAt(1)); 1334 __ LoadImmediate(calling_convention.GetRegisterAt(0), 
instruction->GetTypeIndex()); 1335 1336 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocObjectWithAccessCheck).Int32Value(); 1337 __ LoadFromOffset(kLoadWord, LR, TR, offset); 1338 __ blx(LR); 1339 1340 codegen_->RecordPcInfo(instruction, instruction->GetDexPc()); 1341 DCHECK(!codegen_->IsLeafMethod()); 1342} 1343 1344void LocationsBuilderARM::VisitNewArray(HNewArray* instruction) { 1345 LocationSummary* locations = 1346 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCall); 1347 InvokeRuntimeCallingConvention calling_convention; 1348 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(0))); 1349 locations->AddTemp(Location::RegisterLocation(calling_convention.GetRegisterAt(1))); 1350 locations->SetOut(Location::RegisterLocation(R0)); 1351 locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(2))); 1352} 1353 1354void InstructionCodeGeneratorARM::VisitNewArray(HNewArray* instruction) { 1355 InvokeRuntimeCallingConvention calling_convention; 1356 LoadCurrentMethod(calling_convention.GetRegisterAt(1)); 1357 __ LoadImmediate(calling_convention.GetRegisterAt(0), instruction->GetTypeIndex()); 1358 1359 int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAllocArrayWithAccessCheck).Int32Value(); 1360 __ LoadFromOffset(kLoadWord, LR, TR, offset); 1361 __ blx(LR); 1362 1363 codegen_->RecordPcInfo(instruction, instruction->GetDexPc()); 1364 DCHECK(!codegen_->IsLeafMethod()); 1365} 1366 1367void LocationsBuilderARM::VisitParameterValue(HParameterValue* instruction) { 1368 LocationSummary* locations = 1369 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1370 Location location = parameter_visitor_.GetNextLocation(instruction->GetType()); 1371 if (location.IsStackSlot()) { 1372 location = Location::StackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); 1373 } else if (location.IsDoubleStackSlot()) { 1374 location = 
Location::DoubleStackSlot(location.GetStackIndex() + codegen_->GetFrameSize()); 1375 } 1376 locations->SetOut(location); 1377} 1378 1379void InstructionCodeGeneratorARM::VisitParameterValue(HParameterValue* instruction) { 1380 // Nothing to do, the parameter is already at its location. 1381} 1382 1383void LocationsBuilderARM::VisitNot(HNot* not_) { 1384 LocationSummary* locations = 1385 new (GetGraph()->GetArena()) LocationSummary(not_, LocationSummary::kNoCall); 1386 locations->SetInAt(0, Location::RequiresRegister()); 1387 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1388} 1389 1390void InstructionCodeGeneratorARM::VisitNot(HNot* not_) { 1391 LocationSummary* locations = not_->GetLocations(); 1392 Location out = locations->Out(); 1393 Location in = locations->InAt(0); 1394 switch (not_->InputAt(0)->GetType()) { 1395 case Primitive::kPrimBoolean: 1396 __ eor(out.As<Register>(), in.As<Register>(), ShifterOperand(1)); 1397 break; 1398 1399 case Primitive::kPrimInt: 1400 __ mvn(out.As<Register>(), ShifterOperand(in.As<Register>())); 1401 break; 1402 1403 case Primitive::kPrimLong: 1404 LOG(FATAL) << "Not yet implemented type for not operation " << not_->GetResultType(); 1405 break; 1406 1407 default: 1408 LOG(FATAL) << "Unimplemented type for not operation " << not_->GetResultType(); 1409 } 1410} 1411 1412void LocationsBuilderARM::VisitCompare(HCompare* compare) { 1413 LocationSummary* locations = 1414 new (GetGraph()->GetArena()) LocationSummary(compare, LocationSummary::kNoCall); 1415 locations->SetInAt(0, Location::RequiresRegister()); 1416 locations->SetInAt(1, Location::RequiresRegister()); 1417 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1418} 1419 1420void InstructionCodeGeneratorARM::VisitCompare(HCompare* compare) { 1421 Label greater, done; 1422 LocationSummary* locations = compare->GetLocations(); 1423 switch (compare->InputAt(0)->GetType()) { 1424 case Primitive::kPrimLong: { 1425 Register 
output = locations->Out().As<Register>(); 1426 Location left = locations->InAt(0); 1427 Location right = locations->InAt(1); 1428 Label less, greater, done; 1429 __ cmp(left.AsRegisterPairHigh<Register>(), 1430 ShifterOperand(right.AsRegisterPairHigh<Register>())); // Signed compare. 1431 __ b(&less, LT); 1432 __ b(&greater, GT); 1433 // Do LoadImmediate before any `cmp`, as LoadImmediate might affect 1434 // the status flags. 1435 __ LoadImmediate(output, 0); 1436 __ cmp(left.AsRegisterPairLow<Register>(), 1437 ShifterOperand(right.AsRegisterPairLow<Register>())); // Unsigned compare. 1438 __ b(&done, EQ); 1439 __ b(&less, CC); 1440 1441 __ Bind(&greater); 1442 __ LoadImmediate(output, 1); 1443 __ b(&done); 1444 1445 __ Bind(&less); 1446 __ LoadImmediate(output, -1); 1447 1448 __ Bind(&done); 1449 break; 1450 } 1451 default: 1452 LOG(FATAL) << "Unimplemented compare type " << compare->InputAt(0)->GetType(); 1453 } 1454} 1455 1456void LocationsBuilderARM::VisitPhi(HPhi* instruction) { 1457 LocationSummary* locations = 1458 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1459 for (size_t i = 0, e = instruction->InputCount(); i < e; ++i) { 1460 locations->SetInAt(i, Location::Any()); 1461 } 1462 locations->SetOut(Location::Any()); 1463} 1464 1465void InstructionCodeGeneratorARM::VisitPhi(HPhi* instruction) { 1466 LOG(FATAL) << "Unreachable"; 1467} 1468 1469void LocationsBuilderARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) { 1470 LocationSummary* locations = 1471 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1472 bool is_object_type = instruction->GetFieldType() == Primitive::kPrimNot; 1473 locations->SetInAt(0, Location::RequiresRegister()); 1474 locations->SetInAt(1, Location::RequiresRegister()); 1475 // Temporary registers for the write barrier. 
1476 if (is_object_type) { 1477 locations->AddTemp(Location::RequiresRegister()); 1478 locations->AddTemp(Location::RequiresRegister()); 1479 } 1480} 1481 1482void InstructionCodeGeneratorARM::VisitInstanceFieldSet(HInstanceFieldSet* instruction) { 1483 LocationSummary* locations = instruction->GetLocations(); 1484 Register obj = locations->InAt(0).As<Register>(); 1485 uint32_t offset = instruction->GetFieldOffset().Uint32Value(); 1486 Primitive::Type field_type = instruction->GetFieldType(); 1487 1488 switch (field_type) { 1489 case Primitive::kPrimBoolean: 1490 case Primitive::kPrimByte: { 1491 Register value = locations->InAt(1).As<Register>(); 1492 __ StoreToOffset(kStoreByte, value, obj, offset); 1493 break; 1494 } 1495 1496 case Primitive::kPrimShort: 1497 case Primitive::kPrimChar: { 1498 Register value = locations->InAt(1).As<Register>(); 1499 __ StoreToOffset(kStoreHalfword, value, obj, offset); 1500 break; 1501 } 1502 1503 case Primitive::kPrimInt: 1504 case Primitive::kPrimNot: { 1505 Register value = locations->InAt(1).As<Register>(); 1506 __ StoreToOffset(kStoreWord, value, obj, offset); 1507 if (field_type == Primitive::kPrimNot) { 1508 Register temp = locations->GetTemp(0).As<Register>(); 1509 Register card = locations->GetTemp(1).As<Register>(); 1510 codegen_->MarkGCCard(temp, card, obj, value); 1511 } 1512 break; 1513 } 1514 1515 case Primitive::kPrimLong: { 1516 Location value = locations->InAt(1); 1517 __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset); 1518 break; 1519 } 1520 1521 case Primitive::kPrimFloat: 1522 case Primitive::kPrimDouble: 1523 LOG(FATAL) << "Unimplemented register type " << field_type; 1524 UNREACHABLE(); 1525 case Primitive::kPrimVoid: 1526 LOG(FATAL) << "Unreachable type " << field_type; 1527 UNREACHABLE(); 1528 } 1529} 1530 1531void LocationsBuilderARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) { 1532 LocationSummary* locations = 1533 new (GetGraph()->GetArena()) 
LocationSummary(instruction, LocationSummary::kNoCall); 1534 locations->SetInAt(0, Location::RequiresRegister()); 1535 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1536} 1537 1538void InstructionCodeGeneratorARM::VisitInstanceFieldGet(HInstanceFieldGet* instruction) { 1539 LocationSummary* locations = instruction->GetLocations(); 1540 Register obj = locations->InAt(0).As<Register>(); 1541 uint32_t offset = instruction->GetFieldOffset().Uint32Value(); 1542 1543 switch (instruction->GetType()) { 1544 case Primitive::kPrimBoolean: { 1545 Register out = locations->Out().As<Register>(); 1546 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset); 1547 break; 1548 } 1549 1550 case Primitive::kPrimByte: { 1551 Register out = locations->Out().As<Register>(); 1552 __ LoadFromOffset(kLoadSignedByte, out, obj, offset); 1553 break; 1554 } 1555 1556 case Primitive::kPrimShort: { 1557 Register out = locations->Out().As<Register>(); 1558 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset); 1559 break; 1560 } 1561 1562 case Primitive::kPrimChar: { 1563 Register out = locations->Out().As<Register>(); 1564 __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset); 1565 break; 1566 } 1567 1568 case Primitive::kPrimInt: 1569 case Primitive::kPrimNot: { 1570 Register out = locations->Out().As<Register>(); 1571 __ LoadFromOffset(kLoadWord, out, obj, offset); 1572 break; 1573 } 1574 1575 case Primitive::kPrimLong: { 1576 // TODO: support volatile. 
1577 Location out = locations->Out(); 1578 __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset); 1579 break; 1580 } 1581 1582 case Primitive::kPrimFloat: 1583 case Primitive::kPrimDouble: 1584 LOG(FATAL) << "Unimplemented register type " << instruction->GetType(); 1585 UNREACHABLE(); 1586 case Primitive::kPrimVoid: 1587 LOG(FATAL) << "Unreachable type " << instruction->GetType(); 1588 UNREACHABLE(); 1589 } 1590} 1591 1592void LocationsBuilderARM::VisitNullCheck(HNullCheck* instruction) { 1593 LocationSummary* locations = 1594 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1595 locations->SetInAt(0, Location::RequiresRegister()); 1596 if (instruction->HasUses()) { 1597 locations->SetOut(Location::SameAsFirstInput()); 1598 } 1599} 1600 1601void InstructionCodeGeneratorARM::VisitNullCheck(HNullCheck* instruction) { 1602 SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) NullCheckSlowPathARM(instruction); 1603 codegen_->AddSlowPath(slow_path); 1604 1605 LocationSummary* locations = instruction->GetLocations(); 1606 Location obj = locations->InAt(0); 1607 1608 if (obj.IsRegister()) { 1609 __ cmp(obj.As<Register>(), ShifterOperand(0)); 1610 __ b(slow_path->GetEntryLabel(), EQ); 1611 } else { 1612 DCHECK(obj.IsConstant()) << obj; 1613 DCHECK_EQ(obj.GetConstant()->AsIntConstant()->GetValue(), 0); 1614 __ b(slow_path->GetEntryLabel()); 1615 } 1616} 1617 1618void LocationsBuilderARM::VisitArrayGet(HArrayGet* instruction) { 1619 LocationSummary* locations = 1620 new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall); 1621 locations->SetInAt(0, Location::RequiresRegister()); 1622 locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1))); 1623 locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap); 1624} 1625 1626void InstructionCodeGeneratorARM::VisitArrayGet(HArrayGet* instruction) { 1627 LocationSummary* locations = 
instruction->GetLocations(); 1628 Register obj = locations->InAt(0).As<Register>(); 1629 Location index = locations->InAt(1); 1630 1631 switch (instruction->GetType()) { 1632 case Primitive::kPrimBoolean: { 1633 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value(); 1634 Register out = locations->Out().As<Register>(); 1635 if (index.IsConstant()) { 1636 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset; 1637 __ LoadFromOffset(kLoadUnsignedByte, out, obj, offset); 1638 } else { 1639 __ add(IP, obj, ShifterOperand(index.As<Register>())); 1640 __ LoadFromOffset(kLoadUnsignedByte, out, IP, data_offset); 1641 } 1642 break; 1643 } 1644 1645 case Primitive::kPrimByte: { 1646 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int8_t)).Uint32Value(); 1647 Register out = locations->Out().As<Register>(); 1648 if (index.IsConstant()) { 1649 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset; 1650 __ LoadFromOffset(kLoadSignedByte, out, obj, offset); 1651 } else { 1652 __ add(IP, obj, ShifterOperand(index.As<Register>())); 1653 __ LoadFromOffset(kLoadSignedByte, out, IP, data_offset); 1654 } 1655 break; 1656 } 1657 1658 case Primitive::kPrimShort: { 1659 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int16_t)).Uint32Value(); 1660 Register out = locations->Out().As<Register>(); 1661 if (index.IsConstant()) { 1662 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset; 1663 __ LoadFromOffset(kLoadSignedHalfword, out, obj, offset); 1664 } else { 1665 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2)); 1666 __ LoadFromOffset(kLoadSignedHalfword, out, IP, data_offset); 1667 } 1668 break; 1669 } 1670 1671 case Primitive::kPrimChar: { 1672 uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value(); 1673 Register out = locations->Out().As<Register>(); 1674 if (index.IsConstant()) { 1675 
size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset; 1676 __ LoadFromOffset(kLoadUnsignedHalfword, out, obj, offset); 1677 } else { 1678 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2)); 1679 __ LoadFromOffset(kLoadUnsignedHalfword, out, IP, data_offset); 1680 } 1681 break; 1682 } 1683 1684 case Primitive::kPrimInt: 1685 case Primitive::kPrimNot: { 1686 DCHECK_EQ(sizeof(mirror::HeapReference<mirror::Object>), sizeof(int32_t)); 1687 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value(); 1688 Register out = locations->Out().As<Register>(); 1689 if (index.IsConstant()) { 1690 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset; 1691 __ LoadFromOffset(kLoadWord, out, obj, offset); 1692 } else { 1693 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4)); 1694 __ LoadFromOffset(kLoadWord, out, IP, data_offset); 1695 } 1696 break; 1697 } 1698 1699 case Primitive::kPrimLong: { 1700 uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value(); 1701 Location out = locations->Out(); 1702 if (index.IsConstant()) { 1703 size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset; 1704 __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), obj, offset); 1705 } else { 1706 __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8)); 1707 __ LoadFromOffset(kLoadWordPair, out.AsRegisterPairLow<Register>(), IP, data_offset); 1708 } 1709 break; 1710 } 1711 1712 case Primitive::kPrimFloat: 1713 case Primitive::kPrimDouble: 1714 LOG(FATAL) << "Unimplemented register type " << instruction->GetType(); 1715 UNREACHABLE(); 1716 case Primitive::kPrimVoid: 1717 LOG(FATAL) << "Unreachable type " << instruction->GetType(); 1718 UNREACHABLE(); 1719 } 1720} 1721 1722void LocationsBuilderARM::VisitArraySet(HArraySet* instruction) { 1723 Primitive::Type value_type = 
instruction->GetComponentType();
  bool is_object = value_type == Primitive::kPrimNot;
  // Reference stores are routed through the pAputObject runtime entrypoint,
  // so their inputs must sit in the runtime calling-convention registers;
  // every other element type is stored inline with no call.
  LocationSummary* locations = new (GetGraph()->GetArena()) LocationSummary(
      instruction, is_object ? LocationSummary::kCall : LocationSummary::kNoCall);
  if (is_object) {
    InvokeRuntimeCallingConvention calling_convention;
    locations->SetInAt(0, Location::RegisterLocation(calling_convention.GetRegisterAt(0)));
    locations->SetInAt(1, Location::RegisterLocation(calling_convention.GetRegisterAt(1)));
    locations->SetInAt(2, Location::RegisterLocation(calling_convention.GetRegisterAt(2)));
  } else {
    locations->SetInAt(0, Location::RequiresRegister());
    // A constant index can be folded into the store's immediate offset.
    locations->SetInAt(1, Location::RegisterOrConstant(instruction->InputAt(1)));
    locations->SetInAt(2, Location::RequiresRegister());
  }
}

// Emits an array-element store. Primitive element types are stored directly,
// scaling the index by the element size (TIMES_1/2/4/8); reference stores
// (kPrimNot) call the pAputObject quick entrypoint instead.
// NOTE(review): pAputObject presumably performs the component-type check and
// card marking — confirm against entrypoints/quick/quick_entrypoints.h.
void InstructionCodeGeneratorARM::VisitArraySet(HArraySet* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  Register obj = locations->InAt(0).As<Register>();
  Location index = locations->InAt(1);
  Primitive::Type value_type = instruction->GetComponentType();

  switch (value_type) {
    case Primitive::kPrimBoolean:
    case Primitive::kPrimByte: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint8_t)).Uint32Value();
      Register value = locations->InAt(2).As<Register>();
      if (index.IsConstant()) {
        // Constant index: fold index * 1 + data_offset into the immediate.
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_1) + data_offset;
        __ StoreToOffset(kStoreByte, value, obj, offset);
      } else {
        // Variable index: compute obj + index into the IP scratch register.
        __ add(IP, obj, ShifterOperand(index.As<Register>()));
        __ StoreToOffset(kStoreByte, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimShort:
    case Primitive::kPrimChar: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(uint16_t)).Uint32Value();
      Register value = locations->InAt(2).As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_2) + data_offset;
        __ StoreToOffset(kStoreHalfword, value, obj, offset);
      } else {
        // Scale the index by 2 (halfword elements) while forming the address.
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_2));
        __ StoreToOffset(kStoreHalfword, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimInt: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int32_t)).Uint32Value();
      Register value = locations->InAt(2).As<Register>();
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_4) + data_offset;
        __ StoreToOffset(kStoreWord, value, obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_4));
        __ StoreToOffset(kStoreWord, value, IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimNot: {
      // Inputs are already in the runtime calling-convention registers
      // (see the locations builder above); load the entrypoint from the
      // thread register and call it.
      int32_t offset = QUICK_ENTRYPOINT_OFFSET(kArmWordSize, pAputObject).Int32Value();
      __ LoadFromOffset(kLoadWord, LR, TR, offset);
      __ blx(LR);
      codegen_->RecordPcInfo(instruction, instruction->GetDexPc());
      DCHECK(!codegen_->IsLeafMethod());
      break;
    }

    case Primitive::kPrimLong: {
      uint32_t data_offset = mirror::Array::DataOffset(sizeof(int64_t)).Uint32Value();
      Location value = locations->InAt(2);
      if (index.IsConstant()) {
        size_t offset = (index.GetConstant()->AsIntConstant()->GetValue() << TIMES_8) + data_offset;
        // kStoreWordPair stores both halves of the register pair.
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), obj, offset);
      } else {
        __ add(IP, obj, ShifterOperand(index.As<Register>(), LSL, TIMES_8));
        __ StoreToOffset(kStoreWordPair, value.AsRegisterPairLow<Register>(), IP, data_offset);
      }
      break;
    }

    case Primitive::kPrimFloat:
    case Primitive::kPrimDouble:
      LOG(FATAL) << "Unimplemented register type " << instruction->GetType();
      UNREACHABLE();
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable type " << instruction->GetType();
      UNREACHABLE();
  }
}

void LocationsBuilderARM::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  // Output may share the input register: the length load does not need the
  // array reference afterwards.
  locations->SetOut(Location::RequiresRegister(), Location::kNoOutputOverlap);
}

// Loads the array length field (mirror::Array::length_) into the output.
void InstructionCodeGeneratorARM::VisitArrayLength(HArrayLength* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  uint32_t offset = mirror::Array::LengthOffset().Uint32Value();
  Register obj = locations->InAt(0).As<Register>();
  Register out = locations->Out().As<Register>();
  __ LoadFromOffset(kLoadWord, out, obj, offset);
}

void LocationsBuilderARM::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations =
      new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kNoCall);
  locations->SetInAt(0, Location::RequiresRegister());
  locations->SetInAt(1, Location::RequiresRegister());
  if (instruction->HasUses()) {
    // If the checked index is used later, pin the output to the input so the
    // check itself generates no extra move.
    locations->SetOut(Location::SameAsFirstInput());
  }
}

// Emits the bounds check: compares index against length and branches to the
// BoundsCheckSlowPathARM on failure.
void InstructionCodeGeneratorARM::VisitBoundsCheck(HBoundsCheck* instruction) {
  LocationSummary* locations = instruction->GetLocations();
  SlowPathCodeARM* slow_path = new (GetGraph()->GetArena()) BoundsCheckSlowPathARM(
      instruction, locations->InAt(0), locations->InAt(1));
  codegen_->AddSlowPath(slow_path);

  Register index = locations->InAt(0).As<Register>();
  Register length = locations->InAt(1).As<Register>();

  // Branch on CS (unsigned >=): this rejects index >= length, and — since a
  // negative index reads as a huge unsigned value — index < 0 as well, in a
  // single comparison.
  __ cmp(index, ShifterOperand(length));
  __ b(slow_path->GetEntryLabel(), CS);
}

// Marks the GC card covering `object` after `value` was stored into it.
// Skipped entirely when `value` is null (a null store cannot create a
// reference the GC needs to trace). Clobbers `temp` and `card`.
void CodeGeneratorARM::MarkGCCard(Register temp, Register card, Register object, Register value) {
  Label is_null;
  __ CompareAndBranchIfZero(value, &is_null);
  // card = Thread::card_table_ (per-thread card-table base).
  __ LoadFromOffset(kLoadWord, card, TR, Thread::CardTableOffset<kArmWordSize>().Int32Value());
  __ Lsr(temp, object, gc::accounting::CardTable::kCardShift);
  // strb writes the low byte of `card` to card_base + (object >> kCardShift).
  // NOTE(review): using the table base itself as the dirty byte is a known
  // card-table trick — confirm it matches gc/accounting/card_table.
  __ strb(card, Address(card, temp));
  __ Bind(&is_null);
}

// HTemporary carries no locations of its own.
void LocationsBuilderARM::VisitTemporary(HTemporary* temp) {
  temp->SetLocations(nullptr);
}

void InstructionCodeGeneratorARM::VisitTemporary(HTemporary* temp) {
  // Nothing to do, this is driven by the code generator.
}

// Parallel moves are introduced by the register allocator, after the
// locations-building phase, so this visitor must never run.
void LocationsBuilderARM::VisitParallelMove(HParallelMove* instruction) {
  LOG(FATAL) << "Unreachable";
}

void InstructionCodeGeneratorARM::VisitParallelMove(HParallelMove* instruction) {
  codegen_->GetMoveResolver()->EmitNativeCode(instruction);
}

void LocationsBuilderARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  new (GetGraph()->GetArena()) LocationSummary(instruction, LocationSummary::kCallOnSlowPath);
}

// Emits a suspend check, except where another instruction already covers it:
// loop headers defer to the back edge, and an entry block followed by a goto
// defers to that goto.
void InstructionCodeGeneratorARM::VisitSuspendCheck(HSuspendCheck* instruction) {
  HBasicBlock* block = instruction->GetBlock();
  if (block->GetLoopInformation() != nullptr) {
    DCHECK(block->GetLoopInformation()->GetSuspendCheck() == instruction);
    // The back edge will generate the suspend check.
    return;
  }
  if (block->IsEntryBlock() && instruction->GetNext()->IsGoto()) {
    // The goto will generate the suspend check.
    return;
  }
  GenerateSuspendCheck(instruction, nullptr);
}

// Emits the suspend-check test itself: R4 serves as a countdown — it is
// decremented here and the SuspendCheckSlowPathARM is taken when it reaches
// zero. With a non-null `successor` (a loop back edge) the fast path jumps
// straight to the successor block; otherwise execution falls through.
// NOTE(review): presumably the slow path resets R4 — confirm in
// SuspendCheckSlowPathARM.
void InstructionCodeGeneratorARM::GenerateSuspendCheck(HSuspendCheck* instruction,
                                                       HBasicBlock* successor) {
  SuspendCheckSlowPathARM* slow_path =
      new (GetGraph()->GetArena()) SuspendCheckSlowPathARM(instruction, successor);
  codegen_->AddSlowPath(slow_path);

  __ subs(R4, R4, ShifterOperand(1));
  if (successor == nullptr) {
    __ b(slow_path->GetEntryLabel(), EQ);
    __ Bind(slow_path->GetReturnLabel());
  } else {
    __ b(codegen_->GetLabelOf(successor), NE);
    __ b(slow_path->GetEntryLabel());
  }
}

ArmAssembler* ParallelMoveResolverARM::GetAssembler() const {
  return codegen_->GetAssembler();
}

// Emits one move of the parallel-move graph. Handles register <-> register,
// register <-> stack slot, stack slot <-> stack slot (through IP), and
// int-constant sources; other combinations are not expected here.
void ParallelMoveResolverARM::EmitMove(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister()) {
    if (destination.IsRegister()) {
      __ Mov(destination.As<Register>(), source.As<Register>());
    } else {
      DCHECK(destination.IsStackSlot());
      __ StoreToOffset(kStoreWord, source.As<Register>(),
                       SP, destination.GetStackIndex());
    }
  } else if (source.IsStackSlot()) {
    if (destination.IsRegister()) {
      __ LoadFromOffset(kLoadWord, destination.As<Register>(),
                        SP, source.GetStackIndex());
    } else {
      DCHECK(destination.IsStackSlot());
      // Memory-to-memory move goes through the IP scratch register.
      __ LoadFromOffset(kLoadWord, IP, SP, source.GetStackIndex());
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  } else {
    DCHECK(source.IsConstant());
    DCHECK(source.GetConstant()->IsIntConstant());
    int32_t value = source.GetConstant()->AsIntConstant()->GetValue();
    if (destination.IsRegister()) {
      __ LoadImmediate(destination.As<Register>(), value);
    } else {
      DCHECK(destination.IsStackSlot());
      __ LoadImmediate(IP, value);
      __ StoreToOffset(kStoreWord, IP, SP, destination.GetStackIndex());
    }
  }
}

// Swaps a register with a stack slot, using IP to hold the register value.
void ParallelMoveResolverARM::Exchange(Register reg, int mem) {
  __ Mov(IP, reg);
  __ LoadFromOffset(kLoadWord, reg, SP, mem);
  __ StoreToOffset(kStoreWord, IP, SP, mem);
}

// Swaps two stack slots. Needs a second scratch register besides IP; if the
// ScratchRegisterScope had to spill one to get it, every SP-relative offset
// is shifted by one word to account for the push.
void ParallelMoveResolverARM::Exchange(int mem1, int mem2) {
  ScratchRegisterScope ensure_scratch(this, IP, R0, codegen_->GetNumberOfCoreRegisters());
  int stack_offset = ensure_scratch.IsSpilled() ? kArmWordSize : 0;
  __ LoadFromOffset(kLoadWord, static_cast<Register>(ensure_scratch.GetRegister()),
                    SP, mem1 + stack_offset);
  __ LoadFromOffset(kLoadWord, IP, SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, static_cast<Register>(ensure_scratch.GetRegister()),
                   SP, mem2 + stack_offset);
  __ StoreToOffset(kStoreWord, IP, SP, mem1 + stack_offset);
}

// Emits one swap of the parallel-move graph; IP must not be an operand since
// it is the scratch register for the register-register case.
void ParallelMoveResolverARM::EmitSwap(size_t index) {
  MoveOperands* move = moves_.Get(index);
  Location source = move->GetSource();
  Location destination = move->GetDestination();

  if (source.IsRegister() && destination.IsRegister()) {
    DCHECK_NE(source.As<Register>(), IP);
    DCHECK_NE(destination.As<Register>(), IP);
    __ Mov(IP, source.As<Register>());
    __ Mov(source.As<Register>(), destination.As<Register>());
    __ Mov(destination.As<Register>(), IP);
  } else if (source.IsRegister() && destination.IsStackSlot()) {
    Exchange(source.As<Register>(), destination.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsRegister()) {
    Exchange(destination.As<Register>(), source.GetStackIndex());
  } else if (source.IsStackSlot() && destination.IsStackSlot()) {
    Exchange(source.GetStackIndex(), destination.GetStackIndex());
  } else {
    LOG(FATAL) << "Unimplemented";
  }
}

// Spills/restores a scratch register around its use by the move resolver.
void ParallelMoveResolverARM::SpillScratch(int reg) {
  __ Push(static_cast<Register>(reg));
}

void ParallelMoveResolverARM::RestoreScratch(int reg) {
  __ Pop(static_cast<Register>(reg));
}

}  // namespace arm
}  // namespace art